diff --git a/.github/actions/deploy/action.yml b/.github/actions/deploy/action.yml index 40e0d14bcbc..0e4d79181c8 100644 --- a/.github/actions/deploy/action.yml +++ b/.github/actions/deploy/action.yml @@ -64,7 +64,7 @@ runs: - name: Load Docker Images shell: bash run: | - APPS=("apiserver" "driver" "launcher" "scheduledworkflow" "persistenceagent" "frontend" "metadata-writer") + APPS=("apiserver" "driver" "launcher" "scheduledworkflow" "persistenceagent" "frontend") for app in "${APPS[@]}"; do docker image load -i ${{ inputs.image_path }}/$app/$app.tar docker push ${{ inputs.image_registry }}/$app:${{ inputs.image_tag }} diff --git a/.github/actions/kfp-k8s/action.yml b/.github/actions/kfp-k8s/action.yml index 6ba19288473..a047fd1f466 100644 --- a/.github/actions/kfp-k8s/action.yml +++ b/.github/actions/kfp-k8s/action.yml @@ -1,5 +1,5 @@ -name: "Install kfp & kfp-kubernetes" -description: "Install kfp & kfp-kubernetes" +name: "Install kfp-server-api, kfp & kfp-kubernetes" +description: "Install kfp-server-api, kfp & kfp-kubernetes from source" inputs: build_version: required: true @@ -16,6 +16,13 @@ runs: shell: bash run: pip install build==${{inputs.build_version}} + - name: Build kfp-server-api dist + id: build-kfp-server-api + shell: bash + working-directory: backend/api/v2beta1/python_http_client + run: | + python -m build . + - name: Build kfp dist id: install-kfp shell: bash @@ -37,15 +44,15 @@ runs: working-directory: ./kubernetes_platform run: make golang - # kfp is installed transitively - # --find-links ensures pip first looks in the sdk/python/dist folder - # outputted from generate-kfp-kubernetes-proto-files step before looking at pypi + # kfp and kfp-server-api are installed transitively + # --find-links ensures pip first looks in the dist folders before looking at pypi + # for kfp-server-api and kfp packages - name: Install kfp & kfp-kubernetes from source id: install-kfp-kubernetes shell: bash if: ${{ steps.generate-kfp-kubernetes-proto-files.outcome == 'success' }} run: | - pip install -e ./kubernetes_platform/python[dev] --find-links=sdk/python/dist + pip install -e ./kubernetes_platform/python[dev] --find-links=backend/api/v2beta1/python_http_client/dist --find-links=sdk/python/dist # testing reinstalling kfp package from source with no deps - name: Reinstall kfp from source with no deps diff --git a/.github/resources/manifests/base/grpc-specs.yaml b/.github/resources/manifests/base/grpc-specs.yaml deleted file mode 100644 index 446c2d89c2a..00000000000 --- a/.github/resources/manifests/base/grpc-specs.yaml +++ /dev/null @@ -1,38 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: metadata-grpc-deployment -spec: - template: - spec: - dnsPolicy: ClusterFirst - dnsConfig: - searches: - - NAMESPACE.svc.cluster.local - - svc.cluster.local - - cluster.local - options: - - name: timeout - value: "5" - - name: attempts - value: "3" - - name: ndots - value: "2" - containers: - - name: container - env: - - name: POD_NAMESPACE - valueFrom: - fieldRef: - fieldPath: metadata.namespace - - name: MYSQL_HOST - valueFrom: - configMapKeyRef: - name: dns-config - key: dbHost - - name: MYSQL_PORT - valueFrom: - configMapKeyRef: - name: pipeline-install-config - key: mysqlPort - diff --git a/.github/resources/manifests/kubernetes-native/default/kustomization.yaml b/.github/resources/manifests/kubernetes-native/default/kustomization.yaml index 6369d41e73f..4c109b11556 100644 --- a/.github/resources/manifests/kubernetes-native/default/kustomization.yaml +++ 
b/.github/resources/manifests/kubernetes-native/default/kustomization.yaml @@ -18,19 +18,12 @@ images: - name: ghcr.io/kubeflow/kfp-frontend newName: kind-registry:5000/frontend newTag: latest - - name: ghcr.io/kubeflow/kfp-metadata-writer - newName: kind-registry:5000/metadata-writer - newTag: latest patches: - path: ../../base/apiserver-env.yaml target: kind: Deployment name: ml-pipeline - - path: ../../base/grpc-specs.yaml - target: - kind: Deployment - name: metadata-grpc-deployment - path: ../../base/cache-specs.yaml target: kind: Deployment @@ -47,11 +40,6 @@ replacements: name: ml-pipeline fieldPaths: - spec.template.spec.dnsConfig.searches.[=NAMESPACE.svc.cluster.local] - - select: - kind: Deployment - name: metadata-grpc-deployment - fieldPaths: - - spec.template.spec.dnsConfig.searches.[=NAMESPACE.svc.cluster.local] - select: kind: Deployment name: cache-server diff --git a/.github/resources/manifests/multiuser/artifact-proxy/kustomization.yaml b/.github/resources/manifests/multiuser/artifact-proxy/kustomization.yaml index ef02e240179..b024b2a1d7b 100644 --- a/.github/resources/manifests/multiuser/artifact-proxy/kustomization.yaml +++ b/.github/resources/manifests/multiuser/artifact-proxy/kustomization.yaml @@ -18,19 +18,12 @@ images: - name: ghcr.io/kubeflow/kfp-frontend newName: kind-registry:5000/frontend newTag: latest - - name: ghcr.io/kubeflow/kfp-metadata-writer - newName: kind-registry:5000/metadata-writer - newTag: latest patches: - path: ../../base/apiserver-env.yaml target: kind: Deployment name: ml-pipeline - - path: ../../base/grpc-specs.yaml - target: - kind: Deployment - name: metadata-grpc-deployment - path: ../../base/cache-specs.yaml target: kind: Deployment @@ -63,11 +56,6 @@ replacements: name: ml-pipeline fieldPaths: - spec.template.spec.dnsConfig.searches.[=NAMESPACE.svc.cluster.local] - - select: - kind: Deployment - name: metadata-grpc-deployment - fieldPaths: - - spec.template.spec.dnsConfig.searches.[=NAMESPACE.svc.cluster.local] - select: kind: Deployment name: cache-server diff --git a/.github/resources/manifests/multiuser/cache-disabled/kustomization.yaml b/.github/resources/manifests/multiuser/cache-disabled/kustomization.yaml index 29c5118986e..06e5c1a1e65 100644 --- a/.github/resources/manifests/multiuser/cache-disabled/kustomization.yaml +++ b/.github/resources/manifests/multiuser/cache-disabled/kustomization.yaml @@ -18,10 +18,6 @@ images: - name: ghcr.io/kubeflow/kfp-frontend newName: kind-registry:5000/frontend newTag: latest - - name: ghcr.io/kubeflow/kfp-metadata-writer - newName: kind-registry:5000/metadata-writer - newTag: latest - patches: - path: ../../base/apiserver-env.yaml target: @@ -31,10 +27,6 @@ patches: target: kind: Deployment name: ml-pipeline - - path: ../../base/grpc-specs.yaml - target: - kind: Deployment - name: metadata-grpc-deployment - path: ../../base/cache-specs.yaml target: kind: Deployment @@ -51,11 +43,6 @@ replacements: name: ml-pipeline fieldPaths: - spec.template.spec.dnsConfig.searches.[=NAMESPACE.svc.cluster.local] - - select: - kind: Deployment - name: metadata-grpc-deployment - fieldPaths: - - spec.template.spec.dnsConfig.searches.[=NAMESPACE.svc.cluster.local] - select: kind: Deployment name: cache-server diff --git a/.github/resources/manifests/multiuser/default/kustomization.yaml b/.github/resources/manifests/multiuser/default/kustomization.yaml index ee1f9b244a2..237a0867ff3 100644 --- a/.github/resources/manifests/multiuser/default/kustomization.yaml +++ 
b/.github/resources/manifests/multiuser/default/kustomization.yaml @@ -18,19 +18,12 @@ images: - name: ghcr.io/kubeflow/kfp-frontend newName: kind-registry:5000/frontend newTag: latest - - name: ghcr.io/kubeflow/kfp-metadata-writer - newName: kind-registry:5000/metadata-writer - newTag: latest patches: - path: ../../base/apiserver-env.yaml target: kind: Deployment name: ml-pipeline - - path: ../../base/grpc-specs.yaml - target: - kind: Deployment - name: metadata-grpc-deployment - path: ../../base/cache-specs.yaml target: kind: Deployment @@ -47,11 +40,6 @@ replacements: name: ml-pipeline fieldPaths: - spec.template.spec.dnsConfig.searches.[=NAMESPACE.svc.cluster.local] - - select: - kind: Deployment - name: metadata-grpc-deployment - fieldPaths: - - spec.template.spec.dnsConfig.searches.[=NAMESPACE.svc.cluster.local] - select: kind: Deployment name: cache-server diff --git a/.github/resources/manifests/multiuser/minio/kustomization.yaml b/.github/resources/manifests/multiuser/minio/kustomization.yaml index 2acbd1a0285..b57515de702 100644 --- a/.github/resources/manifests/multiuser/minio/kustomization.yaml +++ b/.github/resources/manifests/multiuser/minio/kustomization.yaml @@ -18,19 +18,12 @@ images: - name: ghcr.io/kubeflow/kfp-frontend newName: kind-registry:5000/frontend newTag: latest - - name: ghcr.io/kubeflow/kfp-metadata-writer - newName: kind-registry:5000/metadata-writer - newTag: latest patches: - path: ../../base/apiserver-env.yaml target: kind: Deployment name: ml-pipeline - - path: ../../base/grpc-specs.yaml - target: - kind: Deployment - name: metadata-grpc-deployment - path: ../../base/cache-specs.yaml target: kind: Deployment @@ -47,11 +40,6 @@ replacements: name: ml-pipeline fieldPaths: - spec.template.spec.dnsConfig.searches.[=NAMESPACE.svc.cluster.local] - - select: - kind: Deployment - name: metadata-grpc-deployment - fieldPaths: - - spec.template.spec.dnsConfig.searches.[=NAMESPACE.svc.cluster.local] - select: kind: Deployment name: cache-server diff --git a/.github/resources/manifests/standalone/cache-disabled-proxy-minio/apiserver-env.yaml b/.github/resources/manifests/standalone/cache-disabled-proxy-minio/apiserver-env.yaml index 3382d1b8737..35cd06a6576 100644 --- a/.github/resources/manifests/standalone/cache-disabled-proxy-minio/apiserver-env.yaml +++ b/.github/resources/manifests/standalone/cache-disabled-proxy-minio/apiserver-env.yaml @@ -15,4 +15,4 @@ spec: - name: HTTPS_PROXY value: "http://squid.squid.svc.cluster.local:3128" - name: NO_PROXY - value: "localhost,127.0.0.1,.svc.cluster.local,kubernetes.default.svc,minio-service.kubeflow,metadata-grpc-service,metadata-grpc-service.kubeflow,ml-pipeline.kubeflow" + value: "localhost,127.0.0.1,.svc.cluster.local,kubernetes.default.svc,minio-service.kubeflow,ml-pipeline.kubeflow" diff --git a/.github/resources/manifests/standalone/cache-disabled-proxy/apiserver-env.yaml b/.github/resources/manifests/standalone/cache-disabled-proxy/apiserver-env.yaml index 94ca66f5a67..6b613ac5327 100644 --- a/.github/resources/manifests/standalone/cache-disabled-proxy/apiserver-env.yaml +++ b/.github/resources/manifests/standalone/cache-disabled-proxy/apiserver-env.yaml @@ -15,6 +15,6 @@ spec: - name: HTTPS_PROXY value: "http://squid.squid.svc.cluster.local:3128" - name: NO_PROXY - value: "localhost,127.0.0.1,.svc.cluster.local,kubernetes.default.svc,minio-service.kubeflow,metadata-grpc-service,metadata-grpc-service.kubeflow,ml-pipeline.kubeflow" + value: 
"localhost,127.0.0.1,.svc.cluster.local,kubernetes.default.svc,minio-service.kubeflow,ml-pipeline.kubeflow" - name: OBJECTSTORECONFIG_HOST value: "minio-service.kubeflow.svc.cluster.local" diff --git a/.github/resources/manifests/standalone/cache-disabled/kustomization.yaml b/.github/resources/manifests/standalone/cache-disabled/kustomization.yaml index 7158892a91d..77e95abe7af 100644 --- a/.github/resources/manifests/standalone/cache-disabled/kustomization.yaml +++ b/.github/resources/manifests/standalone/cache-disabled/kustomization.yaml @@ -18,9 +18,6 @@ images: - name: ghcr.io/kubeflow/kfp-frontend newName: kind-registry:5000/frontend newTag: latest - - name: ghcr.io/kubeflow/kfp-metadata-writer - newName: kind-registry:5000/metadata-writer - newTag: latest patches: - path: ../../base/apiserver-env.yaml @@ -31,10 +28,6 @@ patches: target: kind: Deployment name: ml-pipeline - - path: ../../base/grpc-specs.yaml - target: - kind: Deployment - name: metadata-grpc-deployment - path: ../../base/cache-specs.yaml target: kind: Deployment @@ -51,11 +44,6 @@ replacements: name: ml-pipeline fieldPaths: - spec.template.spec.dnsConfig.searches.[=NAMESPACE.svc.cluster.local] - - select: - kind: Deployment - name: metadata-grpc-deployment - fieldPaths: - - spec.template.spec.dnsConfig.searches.[=NAMESPACE.svc.cluster.local] - select: kind: Deployment name: cache-server diff --git a/.github/resources/manifests/standalone/default/kustomization.yaml b/.github/resources/manifests/standalone/default/kustomization.yaml index ba5aaecbe0d..5150d46deb7 100644 --- a/.github/resources/manifests/standalone/default/kustomization.yaml +++ b/.github/resources/manifests/standalone/default/kustomization.yaml @@ -18,19 +18,12 @@ images: - name: ghcr.io/kubeflow/kfp-frontend newName: kind-registry:5000/frontend newTag: latest - - name: ghcr.io/kubeflow/kfp-metadata-writer - newName: kind-registry:5000/metadata-writer - newTag: latest patches: - path: ../../base/apiserver-env.yaml target: kind: Deployment name: ml-pipeline - - path: ../../base/grpc-specs.yaml - target: - kind: Deployment - name: metadata-grpc-deployment - path: ../../base/cache-specs.yaml target: kind: Deployment @@ -47,11 +40,6 @@ replacements: name: ml-pipeline fieldPaths: - spec.template.spec.dnsConfig.searches.[=NAMESPACE.svc.cluster.local] - - select: - kind: Deployment - name: metadata-grpc-deployment - fieldPaths: - - spec.template.spec.dnsConfig.searches.[=NAMESPACE.svc.cluster.local] - select: kind: Deployment name: cache-server diff --git a/.github/resources/manifests/standalone/minio/kustomization.yaml b/.github/resources/manifests/standalone/minio/kustomization.yaml index edb2b3b887a..ab1cacedda4 100644 --- a/.github/resources/manifests/standalone/minio/kustomization.yaml +++ b/.github/resources/manifests/standalone/minio/kustomization.yaml @@ -18,9 +18,6 @@ images: - name: ghcr.io/kubeflow/kfp-frontend newName: kind-registry:5000/frontend newTag: latest - - name: ghcr.io/kubeflow/kfp-metadata-writer - newName: kind-registry:5000/metadata-writer - newTag: latest patches: - path: ../../base/apiserver-env.yaml @@ -31,10 +28,6 @@ patches: target: kind: Deployment name: ml-pipeline - - path: ../../base/grpc-specs.yaml - target: - kind: Deployment - name: metadata-grpc-deployment - path: ../../base/cache-specs.yaml target: kind: Deployment @@ -51,11 +44,6 @@ replacements: name: ml-pipeline fieldPaths: - spec.template.spec.dnsConfig.searches.[=NAMESPACE.svc.cluster.local] - - select: - kind: Deployment - name: metadata-grpc-deployment - 
fieldPaths: - - spec.template.spec.dnsConfig.searches.[=NAMESPACE.svc.cluster.local] - select: kind: Deployment name: cache-server diff --git a/.github/resources/manifests/standalone/proxy-minio/apiserver-env.yaml b/.github/resources/manifests/standalone/proxy-minio/apiserver-env.yaml index ff97e60b468..ba57dd93b97 100644 --- a/.github/resources/manifests/standalone/proxy-minio/apiserver-env.yaml +++ b/.github/resources/manifests/standalone/proxy-minio/apiserver-env.yaml @@ -13,6 +13,6 @@ spec: - name: HTTPS_PROXY value: "http://squid.squid.svc.cluster.local:3128" - name: NO_PROXY - value: "localhost,127.0.0.1,.svc.cluster.local,kubernetes.default.svc,minio-service.kubeflow,metadata-grpc-service,metadata-grpc-service.kubeflow,ml-pipeline.kubeflow" + value: "localhost,127.0.0.1,.svc.cluster.local,kubernetes.default.svc,minio-service.kubeflow,ml-pipeline.kubeflow" - name: OBJECTSTORECONFIG_HOST value: "minio-service.kubeflow.svc.cluster.local" diff --git a/.github/resources/manifests/standalone/proxy-minio/kustomization.yaml b/.github/resources/manifests/standalone/proxy-minio/kustomization.yaml index ec0e59fa60e..a11b17296ca 100644 --- a/.github/resources/manifests/standalone/proxy-minio/kustomization.yaml +++ b/.github/resources/manifests/standalone/proxy-minio/kustomization.yaml @@ -18,9 +18,6 @@ images: - name: ghcr.io/kubeflow/kfp-frontend newName: kind-registry:5000/frontend newTag: latest - - name: ghcr.io/kubeflow/kfp-metadata-writer - newName: kind-registry:5000/metadata-writer - newTag: latest patches: - path: ../../base/apiserver-env.yaml @@ -31,10 +28,6 @@ patches: target: kind: Deployment name: ml-pipeline - - path: ../../base/grpc-specs.yaml - target: - kind: Deployment - name: metadata-grpc-deployment - path: ../../base/cache-specs.yaml target: kind: Deployment @@ -51,11 +44,6 @@ replacements: name: ml-pipeline fieldPaths: - spec.template.spec.dnsConfig.searches.[=NAMESPACE.svc.cluster.local] - - select: - kind: Deployment - name: metadata-grpc-deployment - fieldPaths: - - spec.template.spec.dnsConfig.searches.[=NAMESPACE.svc.cluster.local] - select: kind: Deployment name: cache-server diff --git a/.github/resources/manifests/standalone/proxy/apiserver-env.yaml b/.github/resources/manifests/standalone/proxy/apiserver-env.yaml index a48345e8c23..fddc105eabf 100644 --- a/.github/resources/manifests/standalone/proxy/apiserver-env.yaml +++ b/.github/resources/manifests/standalone/proxy/apiserver-env.yaml @@ -13,4 +13,4 @@ spec: - name: HTTPS_PROXY value: "http://squid.squid.svc.cluster.local:3128" - name: NO_PROXY - value: "localhost,127.0.0.1,.svc.cluster.local,kubernetes.default.svc,mysql,mysql.kubeflow,minio-service.kubeflow,metadata-grpc-service,metadata-grpc-service.kubeflow,ml-pipeline.kubeflow" + value: "localhost,127.0.0.1,.svc.cluster.local,kubernetes.default.svc,mysql,mysql.kubeflow,minio-service.kubeflow,ml-pipeline.kubeflow" diff --git a/.github/resources/manifests/standalone/proxy/kustomization.yaml b/.github/resources/manifests/standalone/proxy/kustomization.yaml index bbb63b263ca..65995dd1a08 100644 --- a/.github/resources/manifests/standalone/proxy/kustomization.yaml +++ b/.github/resources/manifests/standalone/proxy/kustomization.yaml @@ -18,9 +18,6 @@ images: - name: ghcr.io/kubeflow/kfp-frontend newName: kind-registry:5000/frontend newTag: latest - - name: ghcr.io/kubeflow/kfp-metadata-writer - newName: kind-registry:5000/metadata-writer - newTag: latest patches: - path: ../../base/apiserver-env.yaml @@ -31,10 +28,6 @@ patches: target: kind: Deployment name: 
ml-pipeline - - path: ../../base/grpc-specs.yaml - target: - kind: Deployment - name: metadata-grpc-deployment - path: ../../base/cache-specs.yaml target: kind: Deployment @@ -51,11 +44,6 @@ replacements: name: ml-pipeline fieldPaths: - spec.template.spec.dnsConfig.searches.[=NAMESPACE.svc.cluster.local] - - select: - kind: Deployment - name: metadata-grpc-deployment - fieldPaths: - - spec.template.spec.dnsConfig.searches.[=NAMESPACE.svc.cluster.local] - select: kind: Deployment name: cache-server diff --git a/.github/resources/manifests/standalone/tls-enabled/kustomization.yaml b/.github/resources/manifests/standalone/tls-enabled/kustomization.yaml index aa65eef4fc1..7e0bcc469fe 100644 --- a/.github/resources/manifests/standalone/tls-enabled/kustomization.yaml +++ b/.github/resources/manifests/standalone/tls-enabled/kustomization.yaml @@ -18,10 +18,6 @@ images: - name: ghcr.io/kubeflow/kfp-frontend newName: kind-registry:5000/frontend newTag: latest - - name: ghcr.io/kubeflow/kfp-metadata-writer - newName: kind-registry:5000/metadata-writer - newTag: latest - patches: - path: ../../base/apiserver-env.yaml target: @@ -31,10 +27,6 @@ patches: target: kind: Deployment name: ml-pipeline - - path: ../../base/grpc-specs.yaml - target: - kind: Deployment - name: metadata-grpc-deployment - path: ../../base/cache-specs.yaml target: kind: Deployment @@ -51,11 +43,6 @@ replacements: name: ml-pipeline fieldPaths: - spec.template.spec.dnsConfig.searches.[=NAMESPACE.svc.cluster.local] - - select: - kind: Deployment - name: metadata-grpc-deployment - fieldPaths: - - spec.template.spec.dnsConfig.searches.[=NAMESPACE.svc.cluster.local] - select: kind: Deployment name: cache-server diff --git a/.github/workflows/api-server-tests.yml b/.github/workflows/api-server-tests.yml index 6656f2d5794..84078b6dcf7 100644 --- a/.github/workflows/api-server-tests.yml +++ b/.github/workflows/api-server-tests.yml @@ -39,7 +39,6 @@ on: - '.github/resources/**' - 'backend/api/v2beta1/**' - 'backend/src/**' - - 'backend/metadata_writer/**' - 'backend/test/v2/api/**' - 'manifests/kustomize/**' - '../../test_data/sdk_compiled_pipelines/**' diff --git a/.github/workflows/image-builds-master.yml b/.github/workflows/image-builds-master.yml index 45c389bfd7c..978148a9f85 100644 --- a/.github/workflows/image-builds-master.yml +++ b/.github/workflows/image-builds-master.yml @@ -39,12 +39,6 @@ jobs: - image: kfp-cache-server dockerfile: backend/Dockerfile.cacheserver context: . - - image: kfp-metadata-writer - dockerfile: backend/metadata_writer/Dockerfile - context: . - - image: kfp-metadata-envoy - dockerfile: third_party/metadata_envoy/Dockerfile - context: . - image: kfp-inverse-proxy-agent dockerfile: proxy/Dockerfile context: ./proxy diff --git a/.github/workflows/image-builds-release.yml b/.github/workflows/image-builds-release.yml index 776a5975f5f..0e941a33bda 100644 --- a/.github/workflows/image-builds-release.yml +++ b/.github/workflows/image-builds-release.yml @@ -63,12 +63,6 @@ jobs: - image: kfp-cache-server dockerfile: backend/Dockerfile.cacheserver context: . - - image: kfp-metadata-writer - dockerfile: backend/metadata_writer/Dockerfile - context: . - - image: kfp-metadata-envoy - dockerfile: third_party/metadata_envoy/Dockerfile - context: . 
- image: kfp-inverse-proxy-agent dockerfile: proxy/Dockerfile context: ./proxy diff --git a/.github/workflows/image-builds-with-cache.yml b/.github/workflows/image-builds-with-cache.yml index eb36902868b..cb01cddb1fe 100644 --- a/.github/workflows/image-builds-with-cache.yml +++ b/.github/workflows/image-builds-with-cache.yml @@ -47,9 +47,6 @@ jobs: dockerfile: frontend/Dockerfile context: . - - image: metadata-writer - dockerfile: backend/metadata_writer/Dockerfile - context: . env: ARTIFACT_NAME: "${{ matrix.image }}" ARTIFACTS_PATH: "images_${{ github.sha }}" diff --git a/.github/workflows/kfp-sdk-client-tests.yml b/.github/workflows/kfp-sdk-client-tests.yml index fe460812232..ec0f4ed52d9 100644 --- a/.github/workflows/kfp-sdk-client-tests.yml +++ b/.github/workflows/kfp-sdk-client-tests.yml @@ -74,6 +74,9 @@ jobs: pip install pytest pip install pytest-cov + - name: Install kfp-server-api from source + run: pip install -e backend/api/v2beta1/python_http_client + - name: Run tests id: tests if: ${{ steps.forward-api-port.outcome == 'success' }} diff --git a/.github/workflows/legacy-v2-api-integration-tests.yml b/.github/workflows/legacy-v2-api-integration-tests.yml index 70710ee601f..cea9081d4b5 100644 --- a/.github/workflows/legacy-v2-api-integration-tests.yml +++ b/.github/workflows/legacy-v2-api-integration-tests.yml @@ -73,15 +73,9 @@ jobs: kubectl get secret kfp-api-tls-cert -n kubeflow -o jsonpath='{.data.ca\.crt}' | base64 -d > "${{ github.workspace }}/ca.crt" echo "CA_CERT_PATH=${{ github.workspace }}/ca.crt" >> "$GITHUB_ENV" - - name: Forward MLMD port - id: forward-mlmd-port - if: ${{ steps.deploy.outcome == 'success' }} - run: kubectl -n kubeflow port-forward svc/metadata-grpc-service 8080:8080 & - continue-on-error: true - - name: API integration tests v2 id: tests - if: ${{ steps.forward-mlmd-port.outcome == 'success' }} + if: ${{ steps.deploy.outcome == 'success' }} working-directory: ./backend/test/v2/integration run: go test -v ./... 
-args -runIntegrationTests=true -namespace=kubeflow -tlsEnabled=${{ matrix.pod_to_pod_tls_enabled }} -caCertPath=${{ env.CA_CERT_PATH }} env: @@ -90,7 +84,7 @@ jobs: continue-on-error: true - name: Collect failed logs - if: ${{ steps.deploy.outcome != 'success' || steps.forward-mlmd-port.outcome != 'success' || steps.tests.outcome != 'success' }} + if: ${{ steps.deploy.outcome != 'success' || steps.tests.outcome != 'success' }} run: | ./.github/resources/scripts/collect-logs.sh --ns kubeflow --output /tmp/tmp_pod_log.txt exit 1 diff --git a/.github/workflows/upgrade-test.yml b/.github/workflows/upgrade-test.yml index ea03fa54399..5b6a81916e8 100644 --- a/.github/workflows/upgrade-test.yml +++ b/.github/workflows/upgrade-test.yml @@ -19,7 +19,6 @@ on: - '.github/resources/**' - 'backend/api/v2beta1/**' - 'backend/src/**' - - 'backend/metadata_writer/**' - 'backend/test/v2/api/**' - 'manifests/kustomize/**' - '!**/*.md' diff --git a/.golangci.yaml b/.golangci.yaml index b00124ac151..0ccc7ed4463 100644 --- a/.golangci.yaml +++ b/.golangci.yaml @@ -6,6 +6,7 @@ run: issues: max-same-issues: 0 + linters: default: none enable: @@ -27,6 +28,8 @@ linters: staticcheck: checks: - "all" + # Ignore failures on deprecated usage + - "-SA1019" formatters: enable: diff --git a/api/v2alpha1/go/pipelinespec/pipeline_spec.pb.go b/api/v2alpha1/go/pipelinespec/pipeline_spec.pb.go index e91a354bb7d..ff10b60e0b9 100644 --- a/api/v2alpha1/go/pipelinespec/pipeline_spec.pb.go +++ b/api/v2alpha1/go/pipelinespec/pipeline_spec.pb.go @@ -2067,7 +2067,9 @@ type RuntimeArtifact struct { // Properties of the Artifact. Metadata *structpb.Struct `protobuf:"bytes,6,opt,name=metadata,proto3" json:"metadata,omitempty"` // Custom path for output artifact. - CustomPath *string `protobuf:"bytes,7,opt,name=custom_path,json=customPath,proto3,oneof" json:"custom_path,omitempty"` + CustomPath *string `protobuf:"bytes,7,opt,name=custom_path,json=customPath,proto3,oneof" json:"custom_path,omitempty"` + // The unique server generated id of the artifact. + ArtifactId string `protobuf:"bytes,8,opt,name=artifact_id,json=artifactId,proto3" json:"artifact_id,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -2153,6 +2155,13 @@ func (x *RuntimeArtifact) GetCustomPath() string { return "" } +func (x *RuntimeArtifact) GetArtifactId() string { + if x != nil { + return x.ArtifactId + } + return "" +} + // Message that represents a list of artifacts. 
type ArtifactList struct { state protoimpl.MessageState `protogen:"open.v1"` @@ -5979,7 +5988,7 @@ const file_pipeline_spec_proto_rawDesc = "" + "\tint_value\x18\x01 \x01(\x03H\x00R\bintValue\x12#\n" + "\fdouble_value\x18\x02 \x01(\x01H\x00R\vdoubleValue\x12#\n" + "\fstring_value\x18\x03 \x01(\tH\x00R\vstringValueB\a\n" + - "\x05value\"\xbf\x04\n" + + "\x05value\"\xe0\x04\n" + "\x0fRuntimeArtifact\x12\x12\n" + "\x04name\x18\x01 \x01(\tR\x04name\x124\n" + "\x04type\x18\x02 \x01(\v2 .ml_pipelines.ArtifactTypeSchemaR\x04type\x12\x10\n" + @@ -5990,7 +5999,9 @@ const file_pipeline_spec_proto_rawDesc = "" + "\x11custom_properties\x18\x05 \x03(\v23.ml_pipelines.RuntimeArtifact.CustomPropertiesEntryB\x02\x18\x01R\x10customProperties\x123\n" + "\bmetadata\x18\x06 \x01(\v2\x17.google.protobuf.StructR\bmetadata\x12$\n" + "\vcustom_path\x18\a \x01(\tH\x00R\n" + - "customPath\x88\x01\x01\x1aR\n" + + "customPath\x88\x01\x01\x12\x1f\n" + + "\vartifact_id\x18\b \x01(\tR\n" + + "artifactId\x1aR\n" + "\x0fPropertiesEntry\x12\x10\n" + "\x03key\x18\x01 \x01(\tR\x03key\x12)\n" + "\x05value\x18\x02 \x01(\v2\x13.ml_pipelines.ValueR\x05value:\x028\x01\x1aX\n" + diff --git a/api/v2alpha1/pipeline_spec.proto b/api/v2alpha1/pipeline_spec.proto index eced0271fcb..19c50b167d5 100644 --- a/api/v2alpha1/pipeline_spec.proto +++ b/api/v2alpha1/pipeline_spec.proto @@ -970,6 +970,9 @@ message RuntimeArtifact { // Custom path for output artifact. optional string custom_path = 7; + + // The unique server generated id of the artifact. + string artifact_id = 8; } // Message that represents a list of artifacts. diff --git a/backend/api/Makefile b/backend/api/Makefile index f104d2ccaf8..b554042b233 100644 --- a/backend/api/Makefile +++ b/backend/api/Makefile @@ -26,12 +26,15 @@ RELEASE_IMAGE=ghcr.io/kubeflow/kfp-release:master CONTAINER_ENGINE ?= docker # Generate clients using a pre-built api-generator image. +# Note that: +# :Z is the standard SELinux-friendly way to run containers against your dev tree. +# :Z relabels the repo on disk as container_file_t (SELinux) while you're working .PHONY: generate generate: fetch-dependencies hack/generator.sh $(API_VERSION)/*.proto ${CONTAINER_ENGINE} run --interactive --rm \ -e API_VERSION=$(API_VERSION) \ --user $$(id -u):$$(id -g) \ - --mount type=bind,source="$$(pwd)/../..",target=/go/src/github.com/kubeflow/pipelines \ + -v "$$(pwd)/../..":/go/src/github.com/kubeflow/pipelines:Z \ $(PREBUILT_REMOTE_IMAGE) /go/src/github.com/kubeflow/pipelines/backend/api/hack/generator.sh # Use the release image since it has some additional dependencies diff --git a/backend/api/hack/generator.sh b/backend/api/hack/generator.sh index 45fc482a345..e3bbedc0a6d 100755 --- a/backend/api/hack/generator.sh +++ b/backend/api/hack/generator.sh @@ -154,6 +154,16 @@ swagger generate client \ -c healthz_client \ -m healthz_model \ -t backend/api/${API_VERSION}/go_http_client +# Generate artifact HTTP client for v2beta1 +if [[ "$API_VERSION" == "v2beta1" ]]; then + swagger generate client \ + -f backend/api/${API_VERSION}/swagger/artifact.swagger.json \ + -A artifact \ + --principal models.Principal \ + -c artifact_client \ + -m artifact_model \ + -t backend/api/${API_VERSION}/go_http_client +fi # Hack to fix an issue with go-swagger # See https://github.com/go-swagger/go-swagger/issues/1381 for details.
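For reviewers, a minimal sketch of how a caller might construct one of the new messages from the generated `go_client` package introduced in the next file. Field and enum names follow standard protoc-gen-go conventions for the proto below; the placeholder IDs, URI, and the `main` wrapper are illustrative assumptions, not part of this PR, and the gRPC service wiring is omitted:

```go
package main

import (
	"fmt"

	pb "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client"
)

func main() {
	// Hypothetical values for illustration only.
	uri := "s3://mlpipeline/v2/artifacts/my-pipeline/preprocess/my_output"
	req := &pb.CreateArtifactRequest{
		Artifact: &pb.Artifact{
			Name:      "my_output",         // client-provided name
			Type:      pb.Artifact_Dataset, // required ArtifactType
			Uri:       &uri,                // optional physical location
			Namespace: "kubeflow",
		},
		RunId:       "run-123",   // artifacts are always created in the context of a run
		TaskId:      "task-456",  // the task associated with the artifact's creation
		ProducerKey: "my_output", // output name in the component spec
		Type:        pb.IOType_OUTPUT,
	}
	fmt.Println(req.GetArtifact().GetName(), req.GetType())
}
```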
diff --git a/backend/api/v2beta1/artifact.proto b/backend/api/v2beta1/artifact.proto new file mode 100644 index 00000000000..efb15d4f0ef --- /dev/null +++ b/backend/api/v2beta1/artifact.proto @@ -0,0 +1,375 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +option go_package = "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client"; +package kubeflow.pipelines.backend.api.v2beta1; + +import "google/api/annotations.proto"; +import "google/protobuf/timestamp.proto"; +import "google/protobuf/struct.proto"; +import "protoc-gen-openapiv2/options/annotations.proto"; + +option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_swagger) = { + schemes: [1, 2], // http + https + responses: { + key: "default"; + value: { + schema: { + json_schema: { + ref: ".google.rpc.Status"; + } + } + } + } + // Use bearer token for authorizing access to artifact service. + // Kubernetes client library(https://kubernetes.io/docs/reference/using-api/client-libraries/) + // uses bearer token as default for authorization. The section below + // ensures security definition object is generated in the swagger definition. + // For more details see https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#securityDefinitionsObject + security_definitions: { + security: { + key: "Bearer"; + value: { + type: TYPE_API_KEY; + in: IN_HEADER; + name: "Authorization"; + } + } + } +}; + +service ArtifactService { + // Finds all artifacts within the specified namespace. + rpc ListArtifacts(ListArtifactRequest) returns (ListArtifactResponse) { + option (google.api.http) = { + get: "/apis/v2beta1/artifacts" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + operation_id: "list_artifacts" + summary: "Finds all artifacts within the specified namespace." + tags: "ArtifactService" + }; + } + + // Finds a specific Artifact by ID. + rpc GetArtifact(GetArtifactRequest) returns (Artifact) { + option (google.api.http) = { + get: "/apis/v2beta1/artifacts/{artifact_id}" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + operation_id: "get_artifact" + summary: "Finds a specific Artifact by ID." + tags: "ArtifactService" + }; + } + + // Creates a new artifact. + rpc CreateArtifact(CreateArtifactRequest) returns (Artifact) { + option (google.api.http) = { + post: "/apis/v2beta1/artifacts" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + operation_id: "create_artifact" + summary: "Creates a new artifact." + tags: "ArtifactService" + }; + } + + rpc CreateArtifactsBulk(CreateArtifactsBulkRequest) returns (CreateArtifactsBulkResponse) { + option (google.api.http) = { + post: "/apis/v2beta1/artifacts:batchCreate" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + operation_id: "batch_create_artifacts" + summary: "Creates multiple artifacts in bulk." 
+ tags: "ArtifactService" + }; + } + + // List ArtifactTasks. + rpc ListArtifactTasks(ListArtifactTasksRequest) returns (ListArtifactTasksResponse) { + option (google.api.http) = { + get: "/apis/v2beta1/artifact_tasks" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + operation_id: "list_artifact_tasks" + summary: "Lists artifact-task relationships." + tags: "ArtifactService" + }; + } + + // Creates an artifact-task relationship. + // While we always create an artifact-task link when an artifact is created, + // in the case of Importer, we only create a link (and not an artifact) + // if Reimport = false. + rpc CreateArtifactTask(CreateArtifactTaskRequest) returns (ArtifactTask) { + option (google.api.http) = { + post: "/apis/v2beta1/artifact_tasks" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + operation_id: "create_artifact_task" + summary: "Creates an artifact-task relationship." + tags: "ArtifactService" + }; + } + + // Creates multiple artifact-task relationships in bulk. + rpc CreateArtifactTasksBulk(CreateArtifactTasksBulkRequest) returns (CreateArtifactTasksBulkResponse) { + option (google.api.http) = { + post: "/apis/v2beta1/artifact_tasks:batchCreate" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + operation_id: "batch_create_artifact_tasks" + summary: "Creates multiple artifact-task relationships in bulk." + tags: "ArtifactService" + }; + } +} + +message CreateArtifactRequest { + // Required. The artifact to create. + Artifact artifact = 1; + + // An artifact is always created in the context of a + // run. + string run_id = 2; + // The Task that is associated with the creation of this artifact. + string task_id = 3; + + // The outgoing parameter name of this Artifact within this task's component spec. + // For example: + // def preprocess(my_output: dsl.Outputs[dsl.Artifact]): + // ... + // here the producer_key == "my_output" + // Note that producer_task_name == task_name + string producer_key = 5; + + // If the producing task is in a parallelFor iteration + // this field designates the iteration index + optional int64 iteration_index = 6; + + IOType type = 7; +} + +message CreateArtifactsBulkRequest { + // Required. The list of artifacts to create. + repeated CreateArtifactRequest artifacts = 1; +} + +message CreateArtifactsBulkResponse { + // The list of created artifacts. + repeated Artifact artifacts = 1; +} + +message GetArtifactRequest { + // Required. The ID of the artifact to be retrieved. + string artifact_id = 1; +} + +message ListArtifactRequest { + // Optional input. Namespace for the artifacts. + string namespace = 1; + + // A page token to request the results page. + string page_token = 2; + + // The number of artifacts to be listed per page. If there are more artifacts + // than this number, the response message will contain a valid value in the + // nextPageToken field. + int32 page_size = 3; + + // Sorting order in form of "field_name", "field_name asc" or "field_name desc". + // Ascending by default. + string sort_by = 4; + + // A url-encoded, JSON-serialized filter protocol buffer (see + // [filter.proto](https://github.com/kubeflow/artifacts/blob/master/backend/api/filter.proto)). + string filter = 5; +} + +message ListArtifactResponse { + // The list of artifacts returned. + repeated Artifact artifacts = 1; + + // The total number of artifacts available. This field is not always populated.
+ int32 total_size = 2; + + // A token to retrieve the next page of results, or empty if there are no + // more results in the list. + string next_page_token = 3; +} + +message ListArtifactTasksRequest { + // Optional, filter artifact task by a set of task_ids + repeated string task_ids = 1; + // Optional, filter artifact task by a set of run_ids + repeated string run_ids = 2; + // Optional, filter artifact task by a set of artifact_ids + repeated string artifact_ids = 3; + + // Optional. Only list artifact tasks that have artifacts of this type. + IOType type = 4; + + string page_token = 5; + int32 page_size = 6; + string sort_by = 7; + string filter = 8; +} + +message ListArtifactTasksResponse { + repeated ArtifactTask artifact_tasks = 1; + int32 total_size = 2; + string next_page_token = 3; +} + +// Request to create an artifact-task relationship +message CreateArtifactTaskRequest { + // Required. The artifact-task relationship to create. + ArtifactTask artifact_task = 1; +} + +message CreateArtifactTasksBulkRequest { + // Required. The list of artifact-task relationships to create. + repeated ArtifactTask artifact_tasks = 1; +} + +message CreateArtifactTasksBulkResponse { + // The list of created artifact-task relationships. + repeated ArtifactTask artifact_tasks = 1; +} + +// Describes the I/O relationship between +// Artifacts/Parameters and Tasks. +// There are a couple of instances where +// input/outputs have special types such +// as in the case of LoopArguments or +// dsl.Collected outputs. +enum IOType { + // For validation + UNSPECIFIED = 0; + // This is used for inputs that are + // provided via default parameters in + // the component input definitions + COMPONENT_DEFAULT_INPUT = 1; + // This is used for inputs that are + // provided via upstream tasks. + // In the sdk this appears as: + // TaskInputsSpec.kind.task_output_parameter + // & TaskInputsSpec.kind.task_output_artifact + TASK_OUTPUT_INPUT = 2; + // Used for inputs that are + // passed from parent tasks. + COMPONENT_INPUT = 3; + // Hardcoded values passed + // as arguments to the task. + RUNTIME_VALUE_INPUT = 4; + // Used for dsl.Collected + // Usage of this type indicates that all + // Artifacts within the IOArtifact.artifacts + // are inputs collected from sub tasks with + // ITERATOR_OUTPUT outputs. + COLLECTED_INPUTS = 5; + // In a for loop task, introduced via ParallelFor, this type + // is used to indicate whether this resolved input belongs + // to a parameterIterator or artifactIterator. + // In such a case the "artifacts" field for IOArtifact.artifacts + // is the list of resolved items for this parallelFor. + ITERATOR_INPUT = 6; + // Hardcoded iterator parameters. + // Raw Iterator inputs have no producer + ITERATOR_INPUT_RAW = 7; + // When an output is produced by a Runtime Iteration Task, + // this value is used to differentiate it from standard outputs. + ITERATOR_OUTPUT = 8; + // All other output types fall under this type. + OUTPUT = 9; + // An output of a Conditions branch. + ONE_OF_OUTPUT = 10; + TASK_FINAL_STATUS_OUTPUT = 11; +} + +message IOProducer { + string task_name = 1; + // When a source is from an iteration Runtime + // task type inside a ParallelFor + optional int64 iteration = 2; +} + +// Describes a relationship link between Artifacts and Tasks +message ArtifactTask { + // Output only. The unique server generated id of the ArtifactTask.
+ string id = 1; + string artifact_id = 2; + string run_id = 3; + string task_id = 4; + IOType type = 5; + IOProducer producer = 6; + string key = 7; +} + +// Not to be confused with RuntimeArtifact in PipelineSpec +message Artifact { + // Output only. The unique server generated id of the artifact. + // Note: Updated id name to be consistent with other api naming patterns (with prefix) + string artifact_id = 1; + + // Required. The client provided name of the artifact. + // Note: in MLMD when name was set, it had to be unique for that type_id; + // this restriction is removed here. + // If this is a "Metric" artifact, the name of the metric + // is treated as the Key in its K/V pair. + string name = 2; + + string description = 3; + + enum ArtifactType { + // default; treated as "not set" + // reject if unset. + TYPE_UNSPECIFIED = 0; + + Artifact = 1; + Model = 2; + Dataset = 3; + HTML = 4; + Markdown = 5; + + Metric = 6; + ClassificationMetric = 7; + SlicedClassificationMetric = 8; + } + // Required. The name of an ArtifactType. E.g. Dataset + ArtifactType type = 4; + + // The uniform resource identifier of the physical artifact. + // May be empty if there is no physical artifact. + optional string uri = 5; + + // Optional. User provided custom properties which are not defined by its type. + map<string, google.protobuf.Value> metadata = 6; + + // Used primarily for metrics + optional double number_value = 7; + + // Output only. Create time of the artifact in milliseconds since epoch. + // Note: The type and name are updated from mlmd artifact to be consistent with other backend apis. + google.protobuf.Timestamp created_at = 8; + + string namespace = 9; +} \ No newline at end of file diff --git a/backend/api/v2beta1/go_client/artifact.pb.go b/backend/api/v2beta1/go_client/artifact.pb.go new file mode 100644 index 00000000000..2f7ef2ea0dd --- /dev/null +++ b/backend/api/v2beta1/go_client/artifact.pb.go @@ -0,0 +1,1405 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.36.6 +// protoc v6.31.1 +// source: backend/api/v2beta1/artifact.proto + +package go_client + +import ( + _ "github.com/grpc-ecosystem/grpc-gateway/v2/protoc-gen-openapiv2/options" + _ "google.golang.org/genproto/googleapis/api/annotations" + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + structpb "google.golang.org/protobuf/types/known/structpb" + timestamppb "google.golang.org/protobuf/types/known/timestamppb" + reflect "reflect" + sync "sync" + unsafe "unsafe" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +// Describes the I/O relationship between +// Artifacts/Parameters and Tasks.
+// There are a couple of instances where +// input/outputs have special types such +// as in the case of LoopArguments or +// dsl.Collected outputs. +type IOType int32 + +const ( + // For validation + IOType_UNSPECIFIED IOType = 0 + // This is used for inputs that are + // provided via default parameters in + // the component input definitions + IOType_COMPONENT_DEFAULT_INPUT IOType = 1 + // This is used for inputs that are + // provided via upstream tasks. + // In the sdk this appears as: + // TaskInputsSpec.kind.task_output_parameter + // & TaskInputsSpec.kind.task_output_artifact + IOType_TASK_OUTPUT_INPUT IOType = 2 + // Used for inputs that are + // passed from parent tasks. + IOType_COMPONENT_INPUT IOType = 3 + // Hardcoded values passed + // as arguments to the task. + IOType_RUNTIME_VALUE_INPUT IOType = 4 + // Used for dsl.Collected + // Usage of this type indicates that all + // Artifacts within the IOArtifact.artifacts + // are inputs collected from sub tasks with + // ITERATOR_OUTPUT outputs. + IOType_COLLECTED_INPUTS IOType = 5 + // In a for loop task, introduced via ParallelFor, this type + // is used to indicate whether this resolved input belongs + // to a parameterIterator or artifactIterator. + // In such a case the "artifacts" field for IOArtifact.artifacts + // is the list of resolved items for this parallelFor. + IOType_ITERATOR_INPUT IOType = 6 + // Hardcoded iterator parameters. + // Raw Iterator inputs have no producer + IOType_ITERATOR_INPUT_RAW IOType = 7 + // When an output is produced by a Runtime Iteration Task, + // this value is used to differentiate it from standard outputs. + IOType_ITERATOR_OUTPUT IOType = 8 + // All other output types fall under this type. + IOType_OUTPUT IOType = 9 + // An output of a Conditions branch. + IOType_ONE_OF_OUTPUT IOType = 10 + IOType_TASK_FINAL_STATUS_OUTPUT IOType = 11 +) + +// Enum value maps for IOType. +var ( + IOType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "COMPONENT_DEFAULT_INPUT", + 2: "TASK_OUTPUT_INPUT", + 3: "COMPONENT_INPUT", + 4: "RUNTIME_VALUE_INPUT", + 5: "COLLECTED_INPUTS", + 6: "ITERATOR_INPUT", + 7: "ITERATOR_INPUT_RAW", + 8: "ITERATOR_OUTPUT", + 9: "OUTPUT", + 10: "ONE_OF_OUTPUT", + 11: "TASK_FINAL_STATUS_OUTPUT", + } + IOType_value = map[string]int32{ + "UNSPECIFIED": 0, + "COMPONENT_DEFAULT_INPUT": 1, + "TASK_OUTPUT_INPUT": 2, + "COMPONENT_INPUT": 3, + "RUNTIME_VALUE_INPUT": 4, + "COLLECTED_INPUTS": 5, + "ITERATOR_INPUT": 6, + "ITERATOR_INPUT_RAW": 7, + "ITERATOR_OUTPUT": 8, + "OUTPUT": 9, + "ONE_OF_OUTPUT": 10, + "TASK_FINAL_STATUS_OUTPUT": 11, + } +) + +func (x IOType) Enum() *IOType { + p := new(IOType) + *p = x + return p +} + +func (x IOType) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (IOType) Descriptor() protoreflect.EnumDescriptor { + return file_backend_api_v2beta1_artifact_proto_enumTypes[0].Descriptor() +} + +func (IOType) Type() protoreflect.EnumType { + return &file_backend_api_v2beta1_artifact_proto_enumTypes[0] +} + +func (x IOType) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use IOType.Descriptor instead. +func (IOType) EnumDescriptor() ([]byte, []int) { + return file_backend_api_v2beta1_artifact_proto_rawDescGZIP(), []int{0} +} + +type Artifact_ArtifactType int32 + +const ( + // default; treated as "not set" + // reject if unset.
+ Artifact_TYPE_UNSPECIFIED Artifact_ArtifactType = 0 + Artifact_Artifact Artifact_ArtifactType = 1 + Artifact_Model Artifact_ArtifactType = 2 + Artifact_Dataset Artifact_ArtifactType = 3 + Artifact_HTML Artifact_ArtifactType = 4 + Artifact_Markdown Artifact_ArtifactType = 5 + Artifact_Metric Artifact_ArtifactType = 6 + Artifact_ClassificationMetric Artifact_ArtifactType = 7 + Artifact_SlicedClassificationMetric Artifact_ArtifactType = 8 +) + +// Enum value maps for Artifact_ArtifactType. +var ( + Artifact_ArtifactType_name = map[int32]string{ + 0: "TYPE_UNSPECIFIED", + 1: "Artifact", + 2: "Model", + 3: "Dataset", + 4: "HTML", + 5: "Markdown", + 6: "Metric", + 7: "ClassificationMetric", + 8: "SlicedClassificationMetric", + } + Artifact_ArtifactType_value = map[string]int32{ + "TYPE_UNSPECIFIED": 0, + "Artifact": 1, + "Model": 2, + "Dataset": 3, + "HTML": 4, + "Markdown": 5, + "Metric": 6, + "ClassificationMetric": 7, + "SlicedClassificationMetric": 8, + } +) + +func (x Artifact_ArtifactType) Enum() *Artifact_ArtifactType { + p := new(Artifact_ArtifactType) + *p = x + return p +} + +func (x Artifact_ArtifactType) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (Artifact_ArtifactType) Descriptor() protoreflect.EnumDescriptor { + return file_backend_api_v2beta1_artifact_proto_enumTypes[1].Descriptor() +} + +func (Artifact_ArtifactType) Type() protoreflect.EnumType { + return &file_backend_api_v2beta1_artifact_proto_enumTypes[1] +} + +func (x Artifact_ArtifactType) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use Artifact_ArtifactType.Descriptor instead. +func (Artifact_ArtifactType) EnumDescriptor() ([]byte, []int) { + return file_backend_api_v2beta1_artifact_proto_rawDescGZIP(), []int{13, 0} +} + +type CreateArtifactRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + // Required. The artifact to create. + Artifact *Artifact `protobuf:"bytes,1,opt,name=artifact,proto3" json:"artifact,omitempty"` + // An artifact is always created in the context of a + // run. + RunId string `protobuf:"bytes,2,opt,name=run_id,json=runId,proto3" json:"run_id,omitempty"` + // The Task that is associated with the creation of this artifact. + TaskId string `protobuf:"bytes,3,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"` + // The outgoing parameter name of this Artifact within this task's component spec. + // For example: + // def preprocess(my_output: dsl.Outputs[dsl.Artifact]): + // + // ... 
+ // + // here the producer_key == "my_output" + // Note that producer_task_name == task_name + ProducerKey string `protobuf:"bytes,5,opt,name=producer_key,json=producerKey,proto3" json:"producer_key,omitempty"` + // If the producing task is in a parallelFor iteration + // this field designates the iteration index + IterationIndex *int64 `protobuf:"varint,6,opt,name=iteration_index,json=iterationIndex,proto3,oneof" json:"iteration_index,omitempty"` + Type IOType `protobuf:"varint,7,opt,name=type,proto3,enum=kubeflow.pipelines.backend.api.v2beta1.IOType" json:"type,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *CreateArtifactRequest) Reset() { + *x = CreateArtifactRequest{} + mi := &file_backend_api_v2beta1_artifact_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *CreateArtifactRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CreateArtifactRequest) ProtoMessage() {} + +func (x *CreateArtifactRequest) ProtoReflect() protoreflect.Message { + mi := &file_backend_api_v2beta1_artifact_proto_msgTypes[0] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CreateArtifactRequest.ProtoReflect.Descriptor instead. +func (*CreateArtifactRequest) Descriptor() ([]byte, []int) { + return file_backend_api_v2beta1_artifact_proto_rawDescGZIP(), []int{0} +} + +func (x *CreateArtifactRequest) GetArtifact() *Artifact { + if x != nil { + return x.Artifact + } + return nil +} + +func (x *CreateArtifactRequest) GetRunId() string { + if x != nil { + return x.RunId + } + return "" +} + +func (x *CreateArtifactRequest) GetTaskId() string { + if x != nil { + return x.TaskId + } + return "" +} + +func (x *CreateArtifactRequest) GetProducerKey() string { + if x != nil { + return x.ProducerKey + } + return "" +} + +func (x *CreateArtifactRequest) GetIterationIndex() int64 { + if x != nil && x.IterationIndex != nil { + return *x.IterationIndex + } + return 0 +} + +func (x *CreateArtifactRequest) GetType() IOType { + if x != nil { + return x.Type + } + return IOType_UNSPECIFIED +} + +type CreateArtifactsBulkRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + // Required. The list of artifacts to create. + Artifacts []*CreateArtifactRequest `protobuf:"bytes,1,rep,name=artifacts,proto3" json:"artifacts,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *CreateArtifactsBulkRequest) Reset() { + *x = CreateArtifactsBulkRequest{} + mi := &file_backend_api_v2beta1_artifact_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *CreateArtifactsBulkRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CreateArtifactsBulkRequest) ProtoMessage() {} + +func (x *CreateArtifactsBulkRequest) ProtoReflect() protoreflect.Message { + mi := &file_backend_api_v2beta1_artifact_proto_msgTypes[1] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CreateArtifactsBulkRequest.ProtoReflect.Descriptor instead. 
+func (*CreateArtifactsBulkRequest) Descriptor() ([]byte, []int) { + return file_backend_api_v2beta1_artifact_proto_rawDescGZIP(), []int{1} +} + +func (x *CreateArtifactsBulkRequest) GetArtifacts() []*CreateArtifactRequest { + if x != nil { + return x.Artifacts + } + return nil +} + +type CreateArtifactsBulkResponse struct { + state protoimpl.MessageState `protogen:"open.v1"` + // The list of created artifacts. + Artifacts []*Artifact `protobuf:"bytes,1,rep,name=artifacts,proto3" json:"artifacts,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *CreateArtifactsBulkResponse) Reset() { + *x = CreateArtifactsBulkResponse{} + mi := &file_backend_api_v2beta1_artifact_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *CreateArtifactsBulkResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CreateArtifactsBulkResponse) ProtoMessage() {} + +func (x *CreateArtifactsBulkResponse) ProtoReflect() protoreflect.Message { + mi := &file_backend_api_v2beta1_artifact_proto_msgTypes[2] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CreateArtifactsBulkResponse.ProtoReflect.Descriptor instead. +func (*CreateArtifactsBulkResponse) Descriptor() ([]byte, []int) { + return file_backend_api_v2beta1_artifact_proto_rawDescGZIP(), []int{2} +} + +func (x *CreateArtifactsBulkResponse) GetArtifacts() []*Artifact { + if x != nil { + return x.Artifacts + } + return nil +} + +type GetArtifactRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + // Required. The ID of the artifact to be retrieved. + ArtifactId string `protobuf:"bytes,1,opt,name=artifact_id,json=artifactId,proto3" json:"artifact_id,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *GetArtifactRequest) Reset() { + *x = GetArtifactRequest{} + mi := &file_backend_api_v2beta1_artifact_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *GetArtifactRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetArtifactRequest) ProtoMessage() {} + +func (x *GetArtifactRequest) ProtoReflect() protoreflect.Message { + mi := &file_backend_api_v2beta1_artifact_proto_msgTypes[3] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetArtifactRequest.ProtoReflect.Descriptor instead. +func (*GetArtifactRequest) Descriptor() ([]byte, []int) { + return file_backend_api_v2beta1_artifact_proto_rawDescGZIP(), []int{3} +} + +func (x *GetArtifactRequest) GetArtifactId() string { + if x != nil { + return x.ArtifactId + } + return "" +} + +type ListArtifactRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + // Optional input. Namespace for the artifacts. + Namespace string `protobuf:"bytes,1,opt,name=namespace,proto3" json:"namespace,omitempty"` + // A page token to request the results page. + PageToken string `protobuf:"bytes,2,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // The number of artifacts to be listed per page. 
If there are more artifacts + // than this number, the response message will contain a valid value in the + // nextPageToken field. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Sorting order in form of "field_name", "field_name asc" or "field_name desc". + // Ascending by default. + SortBy string `protobuf:"bytes,4,opt,name=sort_by,json=sortBy,proto3" json:"sort_by,omitempty"` + // A url-encoded, JSON-serialized filter protocol buffer (see + // [filter.proto](https://github.com/kubeflow/artifacts/blob/master/backend/api/filter.proto)). + Filter string `protobuf:"bytes,5,opt,name=filter,proto3" json:"filter,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ListArtifactRequest) Reset() { + *x = ListArtifactRequest{} + mi := &file_backend_api_v2beta1_artifact_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ListArtifactRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListArtifactRequest) ProtoMessage() {} + +func (x *ListArtifactRequest) ProtoReflect() protoreflect.Message { + mi := &file_backend_api_v2beta1_artifact_proto_msgTypes[4] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListArtifactRequest.ProtoReflect.Descriptor instead. +func (*ListArtifactRequest) Descriptor() ([]byte, []int) { + return file_backend_api_v2beta1_artifact_proto_rawDescGZIP(), []int{4} +} + +func (x *ListArtifactRequest) GetNamespace() string { + if x != nil { + return x.Namespace + } + return "" +} + +func (x *ListArtifactRequest) GetPageToken() string { + if x != nil { + return x.PageToken + } + return "" +} + +func (x *ListArtifactRequest) GetPageSize() int32 { + if x != nil { + return x.PageSize + } + return 0 +} + +func (x *ListArtifactRequest) GetSortBy() string { + if x != nil { + return x.SortBy + } + return "" +} + +func (x *ListArtifactRequest) GetFilter() string { + if x != nil { + return x.Filter + } + return "" +} + +type ListArtifactResponse struct { + state protoimpl.MessageState `protogen:"open.v1"` + // The list of artifacts returned. + Artifacts []*Artifact `protobuf:"bytes,1,rep,name=artifacts,proto3" json:"artifacts,omitempty"` + // The total number of artifacts available. This field is not always populated. + TotalSize int32 `protobuf:"varint,2,opt,name=total_size,json=totalSize,proto3" json:"total_size,omitempty"` + // A token to retrieve the next page of results, or empty if there are no + // more results in the list. 
+ NextPageToken string `protobuf:"bytes,3,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ListArtifactResponse) Reset() { + *x = ListArtifactResponse{} + mi := &file_backend_api_v2beta1_artifact_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ListArtifactResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListArtifactResponse) ProtoMessage() {} + +func (x *ListArtifactResponse) ProtoReflect() protoreflect.Message { + mi := &file_backend_api_v2beta1_artifact_proto_msgTypes[5] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListArtifactResponse.ProtoReflect.Descriptor instead. +func (*ListArtifactResponse) Descriptor() ([]byte, []int) { + return file_backend_api_v2beta1_artifact_proto_rawDescGZIP(), []int{5} +} + +func (x *ListArtifactResponse) GetArtifacts() []*Artifact { + if x != nil { + return x.Artifacts + } + return nil +} + +func (x *ListArtifactResponse) GetTotalSize() int32 { + if x != nil { + return x.TotalSize + } + return 0 +} + +func (x *ListArtifactResponse) GetNextPageToken() string { + if x != nil { + return x.NextPageToken + } + return "" +} + +type ListArtifactTasksRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + // Optional, filter artifact task by a set of task_ids + TaskIds []string `protobuf:"bytes,1,rep,name=task_ids,json=taskIds,proto3" json:"task_ids,omitempty"` + // Optional, filter artifact task by a set of run_ids + RunIds []string `protobuf:"bytes,2,rep,name=run_ids,json=runIds,proto3" json:"run_ids,omitempty"` + // Optional, filter artifact task by a set of artifact_ids + ArtifactIds []string `protobuf:"bytes,3,rep,name=artifact_ids,json=artifactIds,proto3" json:"artifact_ids,omitempty"` + // Optional. Only list artifact tasks that have artifacts of this type. + Type IOType `protobuf:"varint,4,opt,name=type,proto3,enum=kubeflow.pipelines.backend.api.v2beta1.IOType" json:"type,omitempty"` + PageToken string `protobuf:"bytes,5,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + PageSize int32 `protobuf:"varint,6,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + SortBy string `protobuf:"bytes,7,opt,name=sort_by,json=sortBy,proto3" json:"sort_by,omitempty"` + Filter string `protobuf:"bytes,8,opt,name=filter,proto3" json:"filter,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ListArtifactTasksRequest) Reset() { + *x = ListArtifactTasksRequest{} + mi := &file_backend_api_v2beta1_artifact_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ListArtifactTasksRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListArtifactTasksRequest) ProtoMessage() {} + +func (x *ListArtifactTasksRequest) ProtoReflect() protoreflect.Message { + mi := &file_backend_api_v2beta1_artifact_proto_msgTypes[6] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListArtifactTasksRequest.ProtoReflect.Descriptor instead. 
+func (*ListArtifactTasksRequest) Descriptor() ([]byte, []int) { + return file_backend_api_v2beta1_artifact_proto_rawDescGZIP(), []int{6} +} + +func (x *ListArtifactTasksRequest) GetTaskIds() []string { + if x != nil { + return x.TaskIds + } + return nil +} + +func (x *ListArtifactTasksRequest) GetRunIds() []string { + if x != nil { + return x.RunIds + } + return nil +} + +func (x *ListArtifactTasksRequest) GetArtifactIds() []string { + if x != nil { + return x.ArtifactIds + } + return nil +} + +func (x *ListArtifactTasksRequest) GetType() IOType { + if x != nil { + return x.Type + } + return IOType_UNSPECIFIED +} + +func (x *ListArtifactTasksRequest) GetPageToken() string { + if x != nil { + return x.PageToken + } + return "" +} + +func (x *ListArtifactTasksRequest) GetPageSize() int32 { + if x != nil { + return x.PageSize + } + return 0 +} + +func (x *ListArtifactTasksRequest) GetSortBy() string { + if x != nil { + return x.SortBy + } + return "" +} + +func (x *ListArtifactTasksRequest) GetFilter() string { + if x != nil { + return x.Filter + } + return "" +} + +type ListArtifactTasksResponse struct { + state protoimpl.MessageState `protogen:"open.v1"` + ArtifactTasks []*ArtifactTask `protobuf:"bytes,1,rep,name=artifact_tasks,json=artifactTasks,proto3" json:"artifact_tasks,omitempty"` + TotalSize int32 `protobuf:"varint,2,opt,name=total_size,json=totalSize,proto3" json:"total_size,omitempty"` + NextPageToken string `protobuf:"bytes,3,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ListArtifactTasksResponse) Reset() { + *x = ListArtifactTasksResponse{} + mi := &file_backend_api_v2beta1_artifact_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ListArtifactTasksResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListArtifactTasksResponse) ProtoMessage() {} + +func (x *ListArtifactTasksResponse) ProtoReflect() protoreflect.Message { + mi := &file_backend_api_v2beta1_artifact_proto_msgTypes[7] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListArtifactTasksResponse.ProtoReflect.Descriptor instead. +func (*ListArtifactTasksResponse) Descriptor() ([]byte, []int) { + return file_backend_api_v2beta1_artifact_proto_rawDescGZIP(), []int{7} +} + +func (x *ListArtifactTasksResponse) GetArtifactTasks() []*ArtifactTask { + if x != nil { + return x.ArtifactTasks + } + return nil +} + +func (x *ListArtifactTasksResponse) GetTotalSize() int32 { + if x != nil { + return x.TotalSize + } + return 0 +} + +func (x *ListArtifactTasksResponse) GetNextPageToken() string { + if x != nil { + return x.NextPageToken + } + return "" +} + +// Request to create an artifact-task relationship +type CreateArtifactTaskRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + // Required. The artifact-task relationship to create. 
+ ArtifactTask *ArtifactTask `protobuf:"bytes,1,opt,name=artifact_task,json=artifactTask,proto3" json:"artifact_task,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *CreateArtifactTaskRequest) Reset() { + *x = CreateArtifactTaskRequest{} + mi := &file_backend_api_v2beta1_artifact_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *CreateArtifactTaskRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CreateArtifactTaskRequest) ProtoMessage() {} + +func (x *CreateArtifactTaskRequest) ProtoReflect() protoreflect.Message { + mi := &file_backend_api_v2beta1_artifact_proto_msgTypes[8] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CreateArtifactTaskRequest.ProtoReflect.Descriptor instead. +func (*CreateArtifactTaskRequest) Descriptor() ([]byte, []int) { + return file_backend_api_v2beta1_artifact_proto_rawDescGZIP(), []int{8} +} + +func (x *CreateArtifactTaskRequest) GetArtifactTask() *ArtifactTask { + if x != nil { + return x.ArtifactTask + } + return nil +} + +type CreateArtifactTasksBulkRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + // Required. The list of artifact-task relationships to create. + ArtifactTasks []*ArtifactTask `protobuf:"bytes,1,rep,name=artifact_tasks,json=artifactTasks,proto3" json:"artifact_tasks,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *CreateArtifactTasksBulkRequest) Reset() { + *x = CreateArtifactTasksBulkRequest{} + mi := &file_backend_api_v2beta1_artifact_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *CreateArtifactTasksBulkRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CreateArtifactTasksBulkRequest) ProtoMessage() {} + +func (x *CreateArtifactTasksBulkRequest) ProtoReflect() protoreflect.Message { + mi := &file_backend_api_v2beta1_artifact_proto_msgTypes[9] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CreateArtifactTasksBulkRequest.ProtoReflect.Descriptor instead. +func (*CreateArtifactTasksBulkRequest) Descriptor() ([]byte, []int) { + return file_backend_api_v2beta1_artifact_proto_rawDescGZIP(), []int{9} +} + +func (x *CreateArtifactTasksBulkRequest) GetArtifactTasks() []*ArtifactTask { + if x != nil { + return x.ArtifactTasks + } + return nil +} + +type CreateArtifactTasksBulkResponse struct { + state protoimpl.MessageState `protogen:"open.v1"` + // The list of created artifact-task relationships. 
+ ArtifactTasks []*ArtifactTask `protobuf:"bytes,1,rep,name=artifact_tasks,json=artifactTasks,proto3" json:"artifact_tasks,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *CreateArtifactTasksBulkResponse) Reset() { + *x = CreateArtifactTasksBulkResponse{} + mi := &file_backend_api_v2beta1_artifact_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *CreateArtifactTasksBulkResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CreateArtifactTasksBulkResponse) ProtoMessage() {} + +func (x *CreateArtifactTasksBulkResponse) ProtoReflect() protoreflect.Message { + mi := &file_backend_api_v2beta1_artifact_proto_msgTypes[10] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CreateArtifactTasksBulkResponse.ProtoReflect.Descriptor instead. +func (*CreateArtifactTasksBulkResponse) Descriptor() ([]byte, []int) { + return file_backend_api_v2beta1_artifact_proto_rawDescGZIP(), []int{10} +} + +func (x *CreateArtifactTasksBulkResponse) GetArtifactTasks() []*ArtifactTask { + if x != nil { + return x.ArtifactTasks + } + return nil +} + +type IOProducer struct { + state protoimpl.MessageState `protogen:"open.v1"` + TaskName string `protobuf:"bytes,1,opt,name=task_name,json=taskName,proto3" json:"task_name,omitempty"` + // When a source is from an iteration Runtime + // task type inside a ParallelFor + Iteration *int64 `protobuf:"varint,2,opt,name=iteration,proto3,oneof" json:"iteration,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *IOProducer) Reset() { + *x = IOProducer{} + mi := &file_backend_api_v2beta1_artifact_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *IOProducer) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*IOProducer) ProtoMessage() {} + +func (x *IOProducer) ProtoReflect() protoreflect.Message { + mi := &file_backend_api_v2beta1_artifact_proto_msgTypes[11] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use IOProducer.ProtoReflect.Descriptor instead. +func (*IOProducer) Descriptor() ([]byte, []int) { + return file_backend_api_v2beta1_artifact_proto_rawDescGZIP(), []int{11} +} + +func (x *IOProducer) GetTaskName() string { + if x != nil { + return x.TaskName + } + return "" +} + +func (x *IOProducer) GetIteration() int64 { + if x != nil && x.Iteration != nil { + return *x.Iteration + } + return 0 +} + +// Describes a relationship link between Artifacts and Tasks +type ArtifactTask struct { + state protoimpl.MessageState `protogen:"open.v1"` + // Output only. The unique server generated id of the ArtifactTask. 
+ Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + ArtifactId string `protobuf:"bytes,2,opt,name=artifact_id,json=artifactId,proto3" json:"artifact_id,omitempty"` + RunId string `protobuf:"bytes,3,opt,name=run_id,json=runId,proto3" json:"run_id,omitempty"` + TaskId string `protobuf:"bytes,4,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"` + Type IOType `protobuf:"varint,5,opt,name=type,proto3,enum=kubeflow.pipelines.backend.api.v2beta1.IOType" json:"type,omitempty"` + Producer *IOProducer `protobuf:"bytes,6,opt,name=producer,proto3" json:"producer,omitempty"` + Key string `protobuf:"bytes,7,opt,name=key,proto3" json:"key,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ArtifactTask) Reset() { + *x = ArtifactTask{} + mi := &file_backend_api_v2beta1_artifact_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ArtifactTask) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ArtifactTask) ProtoMessage() {} + +func (x *ArtifactTask) ProtoReflect() protoreflect.Message { + mi := &file_backend_api_v2beta1_artifact_proto_msgTypes[12] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ArtifactTask.ProtoReflect.Descriptor instead. +func (*ArtifactTask) Descriptor() ([]byte, []int) { + return file_backend_api_v2beta1_artifact_proto_rawDescGZIP(), []int{12} +} + +func (x *ArtifactTask) GetId() string { + if x != nil { + return x.Id + } + return "" +} + +func (x *ArtifactTask) GetArtifactId() string { + if x != nil { + return x.ArtifactId + } + return "" +} + +func (x *ArtifactTask) GetRunId() string { + if x != nil { + return x.RunId + } + return "" +} + +func (x *ArtifactTask) GetTaskId() string { + if x != nil { + return x.TaskId + } + return "" +} + +func (x *ArtifactTask) GetType() IOType { + if x != nil { + return x.Type + } + return IOType_UNSPECIFIED +} + +func (x *ArtifactTask) GetProducer() *IOProducer { + if x != nil { + return x.Producer + } + return nil +} + +func (x *ArtifactTask) GetKey() string { + if x != nil { + return x.Key + } + return "" +} + +// Not to be confused with RuntimeArtifact in PipelineSpec +type Artifact struct { + state protoimpl.MessageState `protogen:"open.v1"` + // Output only. The unique server generated id of the artifact. + // Note: Updated id name to be consistent with other api naming patterns (with prefix) + ArtifactId string `protobuf:"bytes,1,opt,name=artifact_id,json=artifactId,proto3" json:"artifact_id,omitempty"` + // Required. The client provided name of the artifact. + // Note: in MLMD when name was set, it had to be unique for that type_id + // this restriction is removed here + // If this is a "Metric" artifact, the name of the metric + // is treated as the Key in its K/V pair. + Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // Required. The name of an ArtifactType. E.g. Dataset + Type Artifact_ArtifactType `protobuf:"varint,4,opt,name=type,proto3,enum=kubeflow.pipelines.backend.api.v2beta1.Artifact_ArtifactType" json:"type,omitempty"` + // The uniform resource identifier of the physical artifact. + // May be empty if there is no physical artifact. 
+ Uri *string `protobuf:"bytes,5,opt,name=uri,proto3,oneof" json:"uri,omitempty"` + // Optional. User provided custom properties which are not defined by its type. + Metadata map[string]*structpb.Value `protobuf:"bytes,6,rep,name=metadata,proto3" json:"metadata,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + // Used primarily for metrics + NumberValue *float64 `protobuf:"fixed64,7,opt,name=number_value,json=numberValue,proto3,oneof" json:"number_value,omitempty"` + // Output only. Create time of the artifact in millisecond since epoch. + // Note: The type and name is updated from mlmd artifact to be consistent with other backend apis. + CreatedAt *timestamppb.Timestamp `protobuf:"bytes,8,opt,name=created_at,json=createdAt,proto3" json:"created_at,omitempty"` + Namespace string `protobuf:"bytes,9,opt,name=namespace,proto3" json:"namespace,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *Artifact) Reset() { + *x = Artifact{} + mi := &file_backend_api_v2beta1_artifact_proto_msgTypes[13] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *Artifact) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Artifact) ProtoMessage() {} + +func (x *Artifact) ProtoReflect() protoreflect.Message { + mi := &file_backend_api_v2beta1_artifact_proto_msgTypes[13] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Artifact.ProtoReflect.Descriptor instead. +func (*Artifact) Descriptor() ([]byte, []int) { + return file_backend_api_v2beta1_artifact_proto_rawDescGZIP(), []int{13} +} + +func (x *Artifact) GetArtifactId() string { + if x != nil { + return x.ArtifactId + } + return "" +} + +func (x *Artifact) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *Artifact) GetDescription() string { + if x != nil { + return x.Description + } + return "" +} + +func (x *Artifact) GetType() Artifact_ArtifactType { + if x != nil { + return x.Type + } + return Artifact_TYPE_UNSPECIFIED +} + +func (x *Artifact) GetUri() string { + if x != nil && x.Uri != nil { + return *x.Uri + } + return "" +} + +func (x *Artifact) GetMetadata() map[string]*structpb.Value { + if x != nil { + return x.Metadata + } + return nil +} + +func (x *Artifact) GetNumberValue() float64 { + if x != nil && x.NumberValue != nil { + return *x.NumberValue + } + return 0 +} + +func (x *Artifact) GetCreatedAt() *timestamppb.Timestamp { + if x != nil { + return x.CreatedAt + } + return nil +} + +func (x *Artifact) GetNamespace() string { + if x != nil { + return x.Namespace + } + return "" +} + +var File_backend_api_v2beta1_artifact_proto protoreflect.FileDescriptor + +const file_backend_api_v2beta1_artifact_proto_rawDesc = "" + + "\n" + + "\"backend/api/v2beta1/artifact.proto\x12&kubeflow.pipelines.backend.api.v2beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a.protoc-gen-openapiv2/options/annotations.proto\"\xbe\x02\n" + + "\x15CreateArtifactRequest\x12L\n" + + "\bartifact\x18\x01 \x01(\v20.kubeflow.pipelines.backend.api.v2beta1.ArtifactR\bartifact\x12\x15\n" + + "\x06run_id\x18\x02 \x01(\tR\x05runId\x12\x17\n" + + "\atask_id\x18\x03 \x01(\tR\x06taskId\x12!\n" + + "\fproducer_key\x18\x05 \x01(\tR\vproducerKey\x12,\n" + + "\x0fiteration_index\x18\x06 
\x01(\x03H\x00R\x0eiterationIndex\x88\x01\x01\x12B\n" + + "\x04type\x18\a \x01(\x0e2..kubeflow.pipelines.backend.api.v2beta1.IOTypeR\x04typeB\x12\n" + + "\x10_iteration_index\"y\n" + + "\x1aCreateArtifactsBulkRequest\x12[\n" + + "\tartifacts\x18\x01 \x03(\v2=.kubeflow.pipelines.backend.api.v2beta1.CreateArtifactRequestR\tartifacts\"m\n" + + "\x1bCreateArtifactsBulkResponse\x12N\n" + + "\tartifacts\x18\x01 \x03(\v20.kubeflow.pipelines.backend.api.v2beta1.ArtifactR\tartifacts\"5\n" + + "\x12GetArtifactRequest\x12\x1f\n" + + "\vartifact_id\x18\x01 \x01(\tR\n" + + "artifactId\"\xa0\x01\n" + + "\x13ListArtifactRequest\x12\x1c\n" + + "\tnamespace\x18\x01 \x01(\tR\tnamespace\x12\x1d\n" + + "\n" + + "page_token\x18\x02 \x01(\tR\tpageToken\x12\x1b\n" + + "\tpage_size\x18\x03 \x01(\x05R\bpageSize\x12\x17\n" + + "\asort_by\x18\x04 \x01(\tR\x06sortBy\x12\x16\n" + + "\x06filter\x18\x05 \x01(\tR\x06filter\"\xad\x01\n" + + "\x14ListArtifactResponse\x12N\n" + + "\tartifacts\x18\x01 \x03(\v20.kubeflow.pipelines.backend.api.v2beta1.ArtifactR\tartifacts\x12\x1d\n" + + "\n" + + "total_size\x18\x02 \x01(\x05R\ttotalSize\x12&\n" + + "\x0fnext_page_token\x18\x03 \x01(\tR\rnextPageToken\"\xa2\x02\n" + + "\x18ListArtifactTasksRequest\x12\x19\n" + + "\btask_ids\x18\x01 \x03(\tR\ataskIds\x12\x17\n" + + "\arun_ids\x18\x02 \x03(\tR\x06runIds\x12!\n" + + "\fartifact_ids\x18\x03 \x03(\tR\vartifactIds\x12B\n" + + "\x04type\x18\x04 \x01(\x0e2..kubeflow.pipelines.backend.api.v2beta1.IOTypeR\x04type\x12\x1d\n" + + "\n" + + "page_token\x18\x05 \x01(\tR\tpageToken\x12\x1b\n" + + "\tpage_size\x18\x06 \x01(\x05R\bpageSize\x12\x17\n" + + "\asort_by\x18\a \x01(\tR\x06sortBy\x12\x16\n" + + "\x06filter\x18\b \x01(\tR\x06filter\"\xbf\x01\n" + + "\x19ListArtifactTasksResponse\x12[\n" + + "\x0eartifact_tasks\x18\x01 \x03(\v24.kubeflow.pipelines.backend.api.v2beta1.ArtifactTaskR\rartifactTasks\x12\x1d\n" + + "\n" + + "total_size\x18\x02 \x01(\x05R\ttotalSize\x12&\n" + + "\x0fnext_page_token\x18\x03 \x01(\tR\rnextPageToken\"v\n" + + "\x19CreateArtifactTaskRequest\x12Y\n" + + "\rartifact_task\x18\x01 \x01(\v24.kubeflow.pipelines.backend.api.v2beta1.ArtifactTaskR\fartifactTask\"}\n" + + "\x1eCreateArtifactTasksBulkRequest\x12[\n" + + "\x0eartifact_tasks\x18\x01 \x03(\v24.kubeflow.pipelines.backend.api.v2beta1.ArtifactTaskR\rartifactTasks\"~\n" + + "\x1fCreateArtifactTasksBulkResponse\x12[\n" + + "\x0eartifact_tasks\x18\x01 \x03(\v24.kubeflow.pipelines.backend.api.v2beta1.ArtifactTaskR\rartifactTasks\"Z\n" + + "\n" + + "IOProducer\x12\x1b\n" + + "\ttask_name\x18\x01 \x01(\tR\btaskName\x12!\n" + + "\titeration\x18\x02 \x01(\x03H\x00R\titeration\x88\x01\x01B\f\n" + + "\n" + + "_iteration\"\x95\x02\n" + + "\fArtifactTask\x12\x0e\n" + + "\x02id\x18\x01 \x01(\tR\x02id\x12\x1f\n" + + "\vartifact_id\x18\x02 \x01(\tR\n" + + "artifactId\x12\x15\n" + + "\x06run_id\x18\x03 \x01(\tR\x05runId\x12\x17\n" + + "\atask_id\x18\x04 \x01(\tR\x06taskId\x12B\n" + + "\x04type\x18\x05 \x01(\x0e2..kubeflow.pipelines.backend.api.v2beta1.IOTypeR\x04type\x12N\n" + + "\bproducer\x18\x06 \x01(\v22.kubeflow.pipelines.backend.api.v2beta1.IOProducerR\bproducer\x12\x10\n" + + "\x03key\x18\a \x01(\tR\x03key\"\xc1\x05\n" + + "\bArtifact\x12\x1f\n" + + "\vartifact_id\x18\x01 \x01(\tR\n" + + "artifactId\x12\x12\n" + + "\x04name\x18\x02 \x01(\tR\x04name\x12 \n" + + "\vdescription\x18\x03 \x01(\tR\vdescription\x12Q\n" + + "\x04type\x18\x04 \x01(\x0e2=.kubeflow.pipelines.backend.api.v2beta1.Artifact.ArtifactTypeR\x04type\x12\x15\n" + + "\x03uri\x18\x05 
\x01(\tH\x00R\x03uri\x88\x01\x01\x12Z\n" + + "\bmetadata\x18\x06 \x03(\v2>.kubeflow.pipelines.backend.api.v2beta1.Artifact.MetadataEntryR\bmetadata\x12&\n" + + "\fnumber_value\x18\a \x01(\x01H\x01R\vnumberValue\x88\x01\x01\x129\n" + + "\n" + + "created_at\x18\b \x01(\v2\x1a.google.protobuf.TimestampR\tcreatedAt\x12\x1c\n" + + "\tnamespace\x18\t \x01(\tR\tnamespace\x1aS\n" + + "\rMetadataEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12,\n" + + "\x05value\x18\x02 \x01(\v2\x16.google.protobuf.ValueR\x05value:\x028\x01\"\xa8\x01\n" + + "\fArtifactType\x12\x14\n" + + "\x10TYPE_UNSPECIFIED\x10\x00\x12\f\n" + + "\bArtifact\x10\x01\x12\t\n" + + "\x05Model\x10\x02\x12\v\n" + + "\aDataset\x10\x03\x12\b\n" + + "\x04HTML\x10\x04\x12\f\n" + + "\bMarkdown\x10\x05\x12\n" + + "\n" + + "\x06Metric\x10\x06\x12\x18\n" + + "\x14ClassificationMetric\x10\a\x12\x1e\n" + + "\x1aSlicedClassificationMetric\x10\bB\x06\n" + + "\x04_uriB\x0f\n" + + "\r_number_value*\x8f\x02\n" + + "\x06IOType\x12\x0f\n" + + "\vUNSPECIFIED\x10\x00\x12\x1b\n" + + "\x17COMPONENT_DEFAULT_INPUT\x10\x01\x12\x15\n" + + "\x11TASK_OUTPUT_INPUT\x10\x02\x12\x13\n" + + "\x0fCOMPONENT_INPUT\x10\x03\x12\x17\n" + + "\x13RUNTIME_VALUE_INPUT\x10\x04\x12\x14\n" + + "\x10COLLECTED_INPUTS\x10\x05\x12\x12\n" + + "\x0eITERATOR_INPUT\x10\x06\x12\x16\n" + + "\x12ITERATOR_INPUT_RAW\x10\a\x12\x13\n" + + "\x0fITERATOR_OUTPUT\x10\b\x12\n" + + "\n" + + "\x06OUTPUT\x10\t\x12\x11\n" + + "\rONE_OF_OUTPUT\x10\n" + + "\x12\x1c\n" + + "\x18TASK_FINAL_STATUS_OUTPUT\x10\v2\xf5\x0e\n" + + "\x0fArtifactService\x12\x84\x02\n" + + "\rListArtifacts\x12;.kubeflow.pipelines.backend.api.v2beta1.ListArtifactRequest\x1a<.kubeflow.pipelines.backend.api.v2beta1.ListArtifactResponse\"x\x92AV\n" + + "\x0fArtifactService\x123Finds all artifacts within the specified namespace.*\x0elist_artifacts\x82\xd3\xe4\x93\x02\x19\x12\x17/apis/v2beta1/artifacts\x12\xee\x01\n" + + "\vGetArtifact\x12:.kubeflow.pipelines.backend.api.v2beta1.GetArtifactRequest\x1a0.kubeflow.pipelines.backend.api.v2beta1.Artifact\"q\x92AA\n" + + "\x0fArtifactService\x12 Finds a specific Artifact by ID.*\fget_artifact\x82\xd3\xe4\x93\x02'\x12%/apis/v2beta1/artifacts/{artifact_id}\x12\xe3\x01\n" + + "\x0eCreateArtifact\x12=.kubeflow.pipelines.backend.api.v2beta1.CreateArtifactRequest\x1a0.kubeflow.pipelines.backend.api.v2beta1.Artifact\"`\x92A;\n" + + "\x0fArtifactService\x12\x17Creates a new artifact.*\x0fcreate_artifact\x82\xd3\xe4\x93\x02\x1c:\x01*\"\x17/apis/v2beta1/artifacts\x12\x9f\x02\n" + + "\x13CreateArtifactsBulk\x12B.kubeflow.pipelines.backend.api.v2beta1.CreateArtifactsBulkRequest\x1aC.kubeflow.pipelines.backend.api.v2beta1.CreateArtifactsBulkResponse\"\x7f\x92AN\n" + + "\x0fArtifactService\x12#Creates multiple artifacts in bulk.*\x16batch_create_artifacts\x82\xd3\xe4\x93\x02(:\x01*\"#/apis/v2beta1/artifacts:batchCreate\x12\x8b\x02\n" + + "\x11ListArtifactTasks\x12@.kubeflow.pipelines.backend.api.v2beta1.ListArtifactTasksRequest\x1aA.kubeflow.pipelines.backend.api.v2beta1.ListArtifactTasksResponse\"q\x92AJ\n" + + "\x0fArtifactService\x12\"Lists artifact-task relationships.*\x13list_artifact_tasks\x82\xd3\xe4\x93\x02\x1e\x12\x1c/apis/v2beta1/artifact_tasks\x12\x88\x02\n" + + "\x12CreateArtifactTask\x12A.kubeflow.pipelines.backend.api.v2beta1.CreateArtifactTaskRequest\x1a4.kubeflow.pipelines.backend.api.v2beta1.ArtifactTask\"y\x92AO\n" + + "\x0fArtifactService\x12&Creates an artifact-task relationship.*\x14create_artifact_task\x82\xd3\xe4\x93\x02!:\x01*\"\x1c/apis/v2beta1/artifact_tasks\x12\xc8\x02\n" + 
+ "\x17CreateArtifactTasksBulk\x12F.kubeflow.pipelines.backend.api.v2beta1.CreateArtifactTasksBulkRequest\x1aG.kubeflow.pipelines.backend.api.v2beta1.CreateArtifactTasksBulkResponse\"\x9b\x01\x92Ae\n" + + "\x0fArtifactService\x125Creates multiple artifact-task relationships in bulk.*\x1bbatch_create_artifact_tasks\x82\xd3\xe4\x93\x02-:\x01*\"(/apis/v2beta1/artifact_tasks:batchCreateB\x8a\x01\x92AJ*\x02\x01\x02R#\n" + + "\adefault\x12\x18\x12\x16\n" + + "\x14\x1a\x12.google.rpc.StatusZ\x1f\n" + + "\x1d\n" + + "\x06Bearer\x12\x13\b\x02\x1a\rAuthorization \x02Z;github.com/kubeflow/pipelines/backend/api/v2beta1/go_clientb\x06proto3" + +var ( + file_backend_api_v2beta1_artifact_proto_rawDescOnce sync.Once + file_backend_api_v2beta1_artifact_proto_rawDescData []byte +) + +func file_backend_api_v2beta1_artifact_proto_rawDescGZIP() []byte { + file_backend_api_v2beta1_artifact_proto_rawDescOnce.Do(func() { + file_backend_api_v2beta1_artifact_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_backend_api_v2beta1_artifact_proto_rawDesc), len(file_backend_api_v2beta1_artifact_proto_rawDesc))) + }) + return file_backend_api_v2beta1_artifact_proto_rawDescData +} + +var file_backend_api_v2beta1_artifact_proto_enumTypes = make([]protoimpl.EnumInfo, 2) +var file_backend_api_v2beta1_artifact_proto_msgTypes = make([]protoimpl.MessageInfo, 15) +var file_backend_api_v2beta1_artifact_proto_goTypes = []any{ + (IOType)(0), // 0: kubeflow.pipelines.backend.api.v2beta1.IOType + (Artifact_ArtifactType)(0), // 1: kubeflow.pipelines.backend.api.v2beta1.Artifact.ArtifactType + (*CreateArtifactRequest)(nil), // 2: kubeflow.pipelines.backend.api.v2beta1.CreateArtifactRequest + (*CreateArtifactsBulkRequest)(nil), // 3: kubeflow.pipelines.backend.api.v2beta1.CreateArtifactsBulkRequest + (*CreateArtifactsBulkResponse)(nil), // 4: kubeflow.pipelines.backend.api.v2beta1.CreateArtifactsBulkResponse + (*GetArtifactRequest)(nil), // 5: kubeflow.pipelines.backend.api.v2beta1.GetArtifactRequest + (*ListArtifactRequest)(nil), // 6: kubeflow.pipelines.backend.api.v2beta1.ListArtifactRequest + (*ListArtifactResponse)(nil), // 7: kubeflow.pipelines.backend.api.v2beta1.ListArtifactResponse + (*ListArtifactTasksRequest)(nil), // 8: kubeflow.pipelines.backend.api.v2beta1.ListArtifactTasksRequest + (*ListArtifactTasksResponse)(nil), // 9: kubeflow.pipelines.backend.api.v2beta1.ListArtifactTasksResponse + (*CreateArtifactTaskRequest)(nil), // 10: kubeflow.pipelines.backend.api.v2beta1.CreateArtifactTaskRequest + (*CreateArtifactTasksBulkRequest)(nil), // 11: kubeflow.pipelines.backend.api.v2beta1.CreateArtifactTasksBulkRequest + (*CreateArtifactTasksBulkResponse)(nil), // 12: kubeflow.pipelines.backend.api.v2beta1.CreateArtifactTasksBulkResponse + (*IOProducer)(nil), // 13: kubeflow.pipelines.backend.api.v2beta1.IOProducer + (*ArtifactTask)(nil), // 14: kubeflow.pipelines.backend.api.v2beta1.ArtifactTask + (*Artifact)(nil), // 15: kubeflow.pipelines.backend.api.v2beta1.Artifact + nil, // 16: kubeflow.pipelines.backend.api.v2beta1.Artifact.MetadataEntry + (*timestamppb.Timestamp)(nil), // 17: google.protobuf.Timestamp + (*structpb.Value)(nil), // 18: google.protobuf.Value +} +var file_backend_api_v2beta1_artifact_proto_depIdxs = []int32{ + 15, // 0: kubeflow.pipelines.backend.api.v2beta1.CreateArtifactRequest.artifact:type_name -> kubeflow.pipelines.backend.api.v2beta1.Artifact + 0, // 1: kubeflow.pipelines.backend.api.v2beta1.CreateArtifactRequest.type:type_name -> kubeflow.pipelines.backend.api.v2beta1.IOType 
+ 2, // 2: kubeflow.pipelines.backend.api.v2beta1.CreateArtifactsBulkRequest.artifacts:type_name -> kubeflow.pipelines.backend.api.v2beta1.CreateArtifactRequest + 15, // 3: kubeflow.pipelines.backend.api.v2beta1.CreateArtifactsBulkResponse.artifacts:type_name -> kubeflow.pipelines.backend.api.v2beta1.Artifact + 15, // 4: kubeflow.pipelines.backend.api.v2beta1.ListArtifactResponse.artifacts:type_name -> kubeflow.pipelines.backend.api.v2beta1.Artifact + 0, // 5: kubeflow.pipelines.backend.api.v2beta1.ListArtifactTasksRequest.type:type_name -> kubeflow.pipelines.backend.api.v2beta1.IOType + 14, // 6: kubeflow.pipelines.backend.api.v2beta1.ListArtifactTasksResponse.artifact_tasks:type_name -> kubeflow.pipelines.backend.api.v2beta1.ArtifactTask + 14, // 7: kubeflow.pipelines.backend.api.v2beta1.CreateArtifactTaskRequest.artifact_task:type_name -> kubeflow.pipelines.backend.api.v2beta1.ArtifactTask + 14, // 8: kubeflow.pipelines.backend.api.v2beta1.CreateArtifactTasksBulkRequest.artifact_tasks:type_name -> kubeflow.pipelines.backend.api.v2beta1.ArtifactTask + 14, // 9: kubeflow.pipelines.backend.api.v2beta1.CreateArtifactTasksBulkResponse.artifact_tasks:type_name -> kubeflow.pipelines.backend.api.v2beta1.ArtifactTask + 0, // 10: kubeflow.pipelines.backend.api.v2beta1.ArtifactTask.type:type_name -> kubeflow.pipelines.backend.api.v2beta1.IOType + 13, // 11: kubeflow.pipelines.backend.api.v2beta1.ArtifactTask.producer:type_name -> kubeflow.pipelines.backend.api.v2beta1.IOProducer + 1, // 12: kubeflow.pipelines.backend.api.v2beta1.Artifact.type:type_name -> kubeflow.pipelines.backend.api.v2beta1.Artifact.ArtifactType + 16, // 13: kubeflow.pipelines.backend.api.v2beta1.Artifact.metadata:type_name -> kubeflow.pipelines.backend.api.v2beta1.Artifact.MetadataEntry + 17, // 14: kubeflow.pipelines.backend.api.v2beta1.Artifact.created_at:type_name -> google.protobuf.Timestamp + 18, // 15: kubeflow.pipelines.backend.api.v2beta1.Artifact.MetadataEntry.value:type_name -> google.protobuf.Value + 6, // 16: kubeflow.pipelines.backend.api.v2beta1.ArtifactService.ListArtifacts:input_type -> kubeflow.pipelines.backend.api.v2beta1.ListArtifactRequest + 5, // 17: kubeflow.pipelines.backend.api.v2beta1.ArtifactService.GetArtifact:input_type -> kubeflow.pipelines.backend.api.v2beta1.GetArtifactRequest + 2, // 18: kubeflow.pipelines.backend.api.v2beta1.ArtifactService.CreateArtifact:input_type -> kubeflow.pipelines.backend.api.v2beta1.CreateArtifactRequest + 3, // 19: kubeflow.pipelines.backend.api.v2beta1.ArtifactService.CreateArtifactsBulk:input_type -> kubeflow.pipelines.backend.api.v2beta1.CreateArtifactsBulkRequest + 8, // 20: kubeflow.pipelines.backend.api.v2beta1.ArtifactService.ListArtifactTasks:input_type -> kubeflow.pipelines.backend.api.v2beta1.ListArtifactTasksRequest + 10, // 21: kubeflow.pipelines.backend.api.v2beta1.ArtifactService.CreateArtifactTask:input_type -> kubeflow.pipelines.backend.api.v2beta1.CreateArtifactTaskRequest + 11, // 22: kubeflow.pipelines.backend.api.v2beta1.ArtifactService.CreateArtifactTasksBulk:input_type -> kubeflow.pipelines.backend.api.v2beta1.CreateArtifactTasksBulkRequest + 7, // 23: kubeflow.pipelines.backend.api.v2beta1.ArtifactService.ListArtifacts:output_type -> kubeflow.pipelines.backend.api.v2beta1.ListArtifactResponse + 15, // 24: kubeflow.pipelines.backend.api.v2beta1.ArtifactService.GetArtifact:output_type -> kubeflow.pipelines.backend.api.v2beta1.Artifact + 15, // 25: kubeflow.pipelines.backend.api.v2beta1.ArtifactService.CreateArtifact:output_type -> 
kubeflow.pipelines.backend.api.v2beta1.Artifact + 4, // 26: kubeflow.pipelines.backend.api.v2beta1.ArtifactService.CreateArtifactsBulk:output_type -> kubeflow.pipelines.backend.api.v2beta1.CreateArtifactsBulkResponse + 9, // 27: kubeflow.pipelines.backend.api.v2beta1.ArtifactService.ListArtifactTasks:output_type -> kubeflow.pipelines.backend.api.v2beta1.ListArtifactTasksResponse + 14, // 28: kubeflow.pipelines.backend.api.v2beta1.ArtifactService.CreateArtifactTask:output_type -> kubeflow.pipelines.backend.api.v2beta1.ArtifactTask + 12, // 29: kubeflow.pipelines.backend.api.v2beta1.ArtifactService.CreateArtifactTasksBulk:output_type -> kubeflow.pipelines.backend.api.v2beta1.CreateArtifactTasksBulkResponse + 23, // [23:30] is the sub-list for method output_type + 16, // [16:23] is the sub-list for method input_type + 16, // [16:16] is the sub-list for extension type_name + 16, // [16:16] is the sub-list for extension extendee + 0, // [0:16] is the sub-list for field type_name +} + +func init() { file_backend_api_v2beta1_artifact_proto_init() } +func file_backend_api_v2beta1_artifact_proto_init() { + if File_backend_api_v2beta1_artifact_proto != nil { + return + } + file_backend_api_v2beta1_artifact_proto_msgTypes[0].OneofWrappers = []any{} + file_backend_api_v2beta1_artifact_proto_msgTypes[11].OneofWrappers = []any{} + file_backend_api_v2beta1_artifact_proto_msgTypes[13].OneofWrappers = []any{} + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: unsafe.Slice(unsafe.StringData(file_backend_api_v2beta1_artifact_proto_rawDesc), len(file_backend_api_v2beta1_artifact_proto_rawDesc)), + NumEnums: 2, + NumMessages: 15, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_backend_api_v2beta1_artifact_proto_goTypes, + DependencyIndexes: file_backend_api_v2beta1_artifact_proto_depIdxs, + EnumInfos: file_backend_api_v2beta1_artifact_proto_enumTypes, + MessageInfos: file_backend_api_v2beta1_artifact_proto_msgTypes, + }.Build() + File_backend_api_v2beta1_artifact_proto = out.File + file_backend_api_v2beta1_artifact_proto_goTypes = nil + file_backend_api_v2beta1_artifact_proto_depIdxs = nil +} diff --git a/backend/api/v2beta1/go_client/artifact.pb.gw.go b/backend/api/v2beta1/go_client/artifact.pb.gw.go new file mode 100644 index 00000000000..ea49bf25b46 --- /dev/null +++ b/backend/api/v2beta1/go_client/artifact.pb.gw.go @@ -0,0 +1,581 @@ +// Code generated by protoc-gen-grpc-gateway. DO NOT EDIT. +// source: backend/api/v2beta1/artifact.proto + +/* +Package go_client is a reverse proxy. + +It translates gRPC into RESTful JSON APIs. 
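+
+A minimal sketch of exposing the ArtifactService REST mapping over HTTP, assuming
+a gRPC server is already listening at grpcAddr (grpcAddr, ctx, and the insecure
+transport credentials are illustrative choices, not requirements of this package):
+
+	mux := runtime.NewServeMux()
+	opts := []grpc.DialOption{grpc.WithTransportCredentials(insecure.NewCredentials())}
+	if err := RegisterArtifactServiceHandlerFromEndpoint(ctx, mux, grpcAddr, opts); err != nil {
+		log.Fatal(err)
+	}
+	log.Fatal(http.ListenAndServe(":8888", mux))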
+*/ +package go_client + +import ( + "context" + "errors" + "io" + "net/http" + + "github.com/grpc-ecosystem/grpc-gateway/v2/runtime" + "github.com/grpc-ecosystem/grpc-gateway/v2/utilities" + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/metadata" + "google.golang.org/grpc/status" + "google.golang.org/protobuf/proto" +) + +// Suppress "imported and not used" errors +var ( + _ codes.Code + _ io.Reader + _ status.Status + _ = errors.New + _ = runtime.String + _ = utilities.NewDoubleArray + _ = metadata.Join +) + +var filter_ArtifactService_ListArtifacts_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} + +func request_ArtifactService_ListArtifacts_0(ctx context.Context, marshaler runtime.Marshaler, client ArtifactServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq ListArtifactRequest + metadata runtime.ServerMetadata + ) + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_ArtifactService_ListArtifacts_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + msg, err := client.ListArtifacts(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err +} + +func local_request_ArtifactService_ListArtifacts_0(ctx context.Context, marshaler runtime.Marshaler, server ArtifactServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq ListArtifactRequest + metadata runtime.ServerMetadata + ) + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_ArtifactService_ListArtifacts_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + msg, err := server.ListArtifacts(ctx, &protoReq) + return msg, metadata, err +} + +func request_ArtifactService_GetArtifact_0(ctx context.Context, marshaler runtime.Marshaler, client ArtifactServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq GetArtifactRequest + metadata runtime.ServerMetadata + err error + ) + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["artifact_id"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "artifact_id") + } + protoReq.ArtifactId, err = runtime.String(val) + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "artifact_id", err) + } + msg, err := client.GetArtifact(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err +} + +func local_request_ArtifactService_GetArtifact_0(ctx context.Context, marshaler runtime.Marshaler, server ArtifactServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq GetArtifactRequest + metadata runtime.ServerMetadata + err error + ) + val, ok := pathParams["artifact_id"] + if !ok { + return nil, metadata, 
status.Errorf(codes.InvalidArgument, "missing parameter %s", "artifact_id") + } + protoReq.ArtifactId, err = runtime.String(val) + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "artifact_id", err) + } + msg, err := server.GetArtifact(ctx, &protoReq) + return msg, metadata, err +} + +func request_ArtifactService_CreateArtifact_0(ctx context.Context, marshaler runtime.Marshaler, client ArtifactServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq CreateArtifactRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil && !errors.Is(err, io.EOF) { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + msg, err := client.CreateArtifact(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err +} + +func local_request_ArtifactService_CreateArtifact_0(ctx context.Context, marshaler runtime.Marshaler, server ArtifactServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq CreateArtifactRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil && !errors.Is(err, io.EOF) { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + msg, err := server.CreateArtifact(ctx, &protoReq) + return msg, metadata, err +} + +func request_ArtifactService_CreateArtifactsBulk_0(ctx context.Context, marshaler runtime.Marshaler, client ArtifactServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq CreateArtifactsBulkRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil && !errors.Is(err, io.EOF) { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + msg, err := client.CreateArtifactsBulk(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err +} + +func local_request_ArtifactService_CreateArtifactsBulk_0(ctx context.Context, marshaler runtime.Marshaler, server ArtifactServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq CreateArtifactsBulkRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil && !errors.Is(err, io.EOF) { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + msg, err := server.CreateArtifactsBulk(ctx, &protoReq) + return msg, metadata, err +} + +var filter_ArtifactService_ListArtifactTasks_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} + +func request_ArtifactService_ListArtifactTasks_0(ctx context.Context, marshaler runtime.Marshaler, client ArtifactServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq ListArtifactTasksRequest + metadata runtime.ServerMetadata + ) + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, 
"%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_ArtifactService_ListArtifactTasks_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + msg, err := client.ListArtifactTasks(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err +} + +func local_request_ArtifactService_ListArtifactTasks_0(ctx context.Context, marshaler runtime.Marshaler, server ArtifactServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq ListArtifactTasksRequest + metadata runtime.ServerMetadata + ) + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_ArtifactService_ListArtifactTasks_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + msg, err := server.ListArtifactTasks(ctx, &protoReq) + return msg, metadata, err +} + +func request_ArtifactService_CreateArtifactTask_0(ctx context.Context, marshaler runtime.Marshaler, client ArtifactServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq CreateArtifactTaskRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil && !errors.Is(err, io.EOF) { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + msg, err := client.CreateArtifactTask(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err +} + +func local_request_ArtifactService_CreateArtifactTask_0(ctx context.Context, marshaler runtime.Marshaler, server ArtifactServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq CreateArtifactTaskRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil && !errors.Is(err, io.EOF) { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + msg, err := server.CreateArtifactTask(ctx, &protoReq) + return msg, metadata, err +} + +func request_ArtifactService_CreateArtifactTasksBulk_0(ctx context.Context, marshaler runtime.Marshaler, client ArtifactServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq CreateArtifactTasksBulkRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil && !errors.Is(err, io.EOF) { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + msg, err := client.CreateArtifactTasksBulk(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err +} + +func local_request_ArtifactService_CreateArtifactTasksBulk_0(ctx context.Context, marshaler runtime.Marshaler, server ArtifactServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq CreateArtifactTasksBulkRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil && !errors.Is(err, io.EOF) { + 
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + msg, err := server.CreateArtifactTasksBulk(ctx, &protoReq) + return msg, metadata, err +} + +// RegisterArtifactServiceHandlerServer registers the http handlers for service ArtifactService to "mux". +// UnaryRPC :call ArtifactServiceServer directly. +// StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906. +// Note that using this registration option will cause many gRPC library features to stop working. Consider using RegisterArtifactServiceHandlerFromEndpoint instead. +// GRPC interceptors will not work for this type of registration. To use interceptors, you must use the "runtime.WithMiddlewares" option in the "runtime.NewServeMux" call. +func RegisterArtifactServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server ArtifactServiceServer) error { + mux.Handle(http.MethodGet, pattern_ArtifactService_ListArtifacts_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + var stream runtime.ServerTransportStream + ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.ArtifactService/ListArtifacts", runtime.WithHTTPPathPattern("/apis/v2beta1/artifacts")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := local_request_ArtifactService_ListArtifacts_0(annotatedContext, inboundMarshaler, server, req, pathParams) + md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + forward_ArtifactService_ListArtifacts_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + }) + mux.Handle(http.MethodGet, pattern_ArtifactService_GetArtifact_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + var stream runtime.ServerTransportStream + ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.ArtifactService/GetArtifact", runtime.WithHTTPPathPattern("/apis/v2beta1/artifacts/{artifact_id}")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := local_request_ArtifactService_GetArtifact_0(annotatedContext, inboundMarshaler, server, req, pathParams) + md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + forward_ArtifactService_GetArtifact_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
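+		// Every route in this server-side registration follows the same pattern:
+		// annotate the request context with gRPC metadata for the matched HTTP
+		// path, call the in-process ArtifactServiceServer method, merge the
+		// captured header/trailer metadata back into the context, then forward
+		// either the error or the response to the HTTP writer.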
+ }) + mux.Handle(http.MethodPost, pattern_ArtifactService_CreateArtifact_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + var stream runtime.ServerTransportStream + ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.ArtifactService/CreateArtifact", runtime.WithHTTPPathPattern("/apis/v2beta1/artifacts")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := local_request_ArtifactService_CreateArtifact_0(annotatedContext, inboundMarshaler, server, req, pathParams) + md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + forward_ArtifactService_CreateArtifact_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + }) + mux.Handle(http.MethodPost, pattern_ArtifactService_CreateArtifactsBulk_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + var stream runtime.ServerTransportStream + ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.ArtifactService/CreateArtifactsBulk", runtime.WithHTTPPathPattern("/apis/v2beta1/artifacts:batchCreate")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := local_request_ArtifactService_CreateArtifactsBulk_0(annotatedContext, inboundMarshaler, server, req, pathParams) + md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + forward_ArtifactService_CreateArtifactsBulk_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
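+		// This route is the gateway form of the custom batchCreate method:
+		// POST /apis/v2beta1/artifacts:batchCreate with a JSON body that decodes
+		// into CreateArtifactsBulkRequest. A hypothetical body, for illustration
+		// only (field names follow proto3 JSON; the enum value "Metric" comes
+		// from Artifact.ArtifactType):
+		//
+		//	{"artifacts": [{"artifact": {"name": "accuracy", "type": "Metric", "numberValue": 0.93}, "runId": "...", "taskId": "..."}]}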
+ }) + mux.Handle(http.MethodGet, pattern_ArtifactService_ListArtifactTasks_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + var stream runtime.ServerTransportStream + ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.ArtifactService/ListArtifactTasks", runtime.WithHTTPPathPattern("/apis/v2beta1/artifact_tasks")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := local_request_ArtifactService_ListArtifactTasks_0(annotatedContext, inboundMarshaler, server, req, pathParams) + md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + forward_ArtifactService_ListArtifactTasks_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + }) + mux.Handle(http.MethodPost, pattern_ArtifactService_CreateArtifactTask_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + var stream runtime.ServerTransportStream + ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.ArtifactService/CreateArtifactTask", runtime.WithHTTPPathPattern("/apis/v2beta1/artifact_tasks")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := local_request_ArtifactService_CreateArtifactTask_0(annotatedContext, inboundMarshaler, server, req, pathParams) + md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + forward_ArtifactService_CreateArtifactTask_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ }) + mux.Handle(http.MethodPost, pattern_ArtifactService_CreateArtifactTasksBulk_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + var stream runtime.ServerTransportStream + ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.ArtifactService/CreateArtifactTasksBulk", runtime.WithHTTPPathPattern("/apis/v2beta1/artifact_tasks:batchCreate")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := local_request_ArtifactService_CreateArtifactTasksBulk_0(annotatedContext, inboundMarshaler, server, req, pathParams) + md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + forward_ArtifactService_CreateArtifactTasksBulk_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + }) + + return nil +} + +// RegisterArtifactServiceHandlerFromEndpoint is same as RegisterArtifactServiceHandler but +// automatically dials to "endpoint" and closes the connection when "ctx" gets done. +func RegisterArtifactServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { + conn, err := grpc.NewClient(endpoint, opts...) + if err != nil { + return err + } + defer func() { + if err != nil { + if cerr := conn.Close(); cerr != nil { + grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr) + } + return + } + go func() { + <-ctx.Done() + if cerr := conn.Close(); cerr != nil { + grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr) + } + }() + }() + return RegisterArtifactServiceHandler(ctx, mux, conn) +} + +// RegisterArtifactServiceHandler registers the http handlers for service ArtifactService to "mux". +// The handlers forward requests to the grpc endpoint over "conn". +func RegisterArtifactServiceHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error { + return RegisterArtifactServiceHandlerClient(ctx, mux, NewArtifactServiceClient(conn)) +} + +// RegisterArtifactServiceHandlerClient registers the http handlers for service ArtifactService +// to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "ArtifactServiceClient". +// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "ArtifactServiceClient" +// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in +// "ArtifactServiceClient" to call the correct interceptors. This client ignores the HTTP middlewares. 
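+//
+// A minimal sketch of registering against an existing connection instead of
+// dialing by endpoint (conn is assumed to be an established *grpc.ClientConn):
+//
+//	mux := runtime.NewServeMux()
+//	if err := RegisterArtifactServiceHandlerClient(ctx, mux, NewArtifactServiceClient(conn)); err != nil {
+//		log.Fatal(err)
+//	}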
+func RegisterArtifactServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client ArtifactServiceClient) error { + mux.Handle(http.MethodGet, pattern_ArtifactService_ListArtifacts_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.ArtifactService/ListArtifacts", runtime.WithHTTPPathPattern("/apis/v2beta1/artifacts")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_ArtifactService_ListArtifacts_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + forward_ArtifactService_ListArtifacts_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + }) + mux.Handle(http.MethodGet, pattern_ArtifactService_GetArtifact_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.ArtifactService/GetArtifact", runtime.WithHTTPPathPattern("/apis/v2beta1/artifacts/{artifact_id}")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_ArtifactService_GetArtifact_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + forward_ArtifactService_GetArtifact_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + }) + mux.Handle(http.MethodPost, pattern_ArtifactService_CreateArtifact_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.ArtifactService/CreateArtifact", runtime.WithHTTPPathPattern("/apis/v2beta1/artifacts")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_ArtifactService_CreateArtifact_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + forward_ArtifactService_CreateArtifact_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ }) + mux.Handle(http.MethodPost, pattern_ArtifactService_CreateArtifactsBulk_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.ArtifactService/CreateArtifactsBulk", runtime.WithHTTPPathPattern("/apis/v2beta1/artifacts:batchCreate")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_ArtifactService_CreateArtifactsBulk_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + forward_ArtifactService_CreateArtifactsBulk_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + }) + mux.Handle(http.MethodGet, pattern_ArtifactService_ListArtifactTasks_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.ArtifactService/ListArtifactTasks", runtime.WithHTTPPathPattern("/apis/v2beta1/artifact_tasks")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_ArtifactService_ListArtifactTasks_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + forward_ArtifactService_ListArtifactTasks_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + }) + mux.Handle(http.MethodPost, pattern_ArtifactService_CreateArtifactTask_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.ArtifactService/CreateArtifactTask", runtime.WithHTTPPathPattern("/apis/v2beta1/artifact_tasks")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_ArtifactService_CreateArtifactTask_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + forward_ArtifactService_CreateArtifactTask_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ }) + mux.Handle(http.MethodPost, pattern_ArtifactService_CreateArtifactTasksBulk_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.ArtifactService/CreateArtifactTasksBulk", runtime.WithHTTPPathPattern("/apis/v2beta1/artifact_tasks:batchCreate")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_ArtifactService_CreateArtifactTasksBulk_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + forward_ArtifactService_CreateArtifactTasksBulk_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + }) + return nil +} + +var ( + pattern_ArtifactService_ListArtifacts_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v2beta1", "artifacts"}, "")) + pattern_ArtifactService_GetArtifact_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "artifacts", "artifact_id"}, "")) + pattern_ArtifactService_CreateArtifact_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v2beta1", "artifacts"}, "")) + pattern_ArtifactService_CreateArtifactsBulk_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v2beta1", "artifacts"}, "batchCreate")) + pattern_ArtifactService_ListArtifactTasks_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v2beta1", "artifact_tasks"}, "")) + pattern_ArtifactService_CreateArtifactTask_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v2beta1", "artifact_tasks"}, "")) + pattern_ArtifactService_CreateArtifactTasksBulk_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v2beta1", "artifact_tasks"}, "batchCreate")) +) + +var ( + forward_ArtifactService_ListArtifacts_0 = runtime.ForwardResponseMessage + forward_ArtifactService_GetArtifact_0 = runtime.ForwardResponseMessage + forward_ArtifactService_CreateArtifact_0 = runtime.ForwardResponseMessage + forward_ArtifactService_CreateArtifactsBulk_0 = runtime.ForwardResponseMessage + forward_ArtifactService_ListArtifactTasks_0 = runtime.ForwardResponseMessage + forward_ArtifactService_CreateArtifactTask_0 = runtime.ForwardResponseMessage + forward_ArtifactService_CreateArtifactTasksBulk_0 = runtime.ForwardResponseMessage +) diff --git a/backend/api/v2beta1/go_client/artifact_grpc.pb.go b/backend/api/v2beta1/go_client/artifact_grpc.pb.go new file mode 100644 index 00000000000..2d46092263b --- /dev/null +++ b/backend/api/v2beta1/go_client/artifact_grpc.pb.go @@ -0,0 +1,381 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Code generated by protoc-gen-go-grpc. DO NOT EDIT.
+// versions:
+// - protoc-gen-go-grpc v1.5.1
+// - protoc v6.31.1
+// source: backend/api/v2beta1/artifact.proto
+
+package go_client
+
+import (
+ context "context"
+ grpc "google.golang.org/grpc"
+ codes "google.golang.org/grpc/codes"
+ status "google.golang.org/grpc/status"
+)
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the grpc package it is being compiled against.
+// Requires gRPC-Go v1.64.0 or later.
+const _ = grpc.SupportPackageIsVersion9
+
+const (
+ ArtifactService_ListArtifacts_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.ArtifactService/ListArtifacts"
+ ArtifactService_GetArtifact_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.ArtifactService/GetArtifact"
+ ArtifactService_CreateArtifact_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.ArtifactService/CreateArtifact"
+ ArtifactService_CreateArtifactsBulk_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.ArtifactService/CreateArtifactsBulk"
+ ArtifactService_ListArtifactTasks_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.ArtifactService/ListArtifactTasks"
+ ArtifactService_CreateArtifactTask_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.ArtifactService/CreateArtifactTask"
+ ArtifactService_CreateArtifactTasksBulk_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.ArtifactService/CreateArtifactTasksBulk"
+)
+
+// ArtifactServiceClient is the client API for ArtifactService service.
+//
+// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream.
+type ArtifactServiceClient interface {
+ // Finds all artifacts within the specified namespace.
+ ListArtifacts(ctx context.Context, in *ListArtifactRequest, opts ...grpc.CallOption) (*ListArtifactResponse, error)
+ // Finds a specific Artifact by ID.
+ GetArtifact(ctx context.Context, in *GetArtifactRequest, opts ...grpc.CallOption) (*Artifact, error)
+ // Creates a new artifact.
+ CreateArtifact(ctx context.Context, in *CreateArtifactRequest, opts ...grpc.CallOption) (*Artifact, error)
+ CreateArtifactsBulk(ctx context.Context, in *CreateArtifactsBulkRequest, opts ...grpc.CallOption) (*CreateArtifactsBulkResponse, error)
+ // List ArtifactTasks.
+ ListArtifactTasks(ctx context.Context, in *ListArtifactTasksRequest, opts ...grpc.CallOption) (*ListArtifactTasksResponse, error)
+ // Creates an artifact-task relationship.
+ // While we always create an artifact-task link when an artifact is created,
+ // in the case of Importer we only create a link (and not an artifact)
+ // if Reimport = false.
+ CreateArtifactTask(ctx context.Context, in *CreateArtifactTaskRequest, opts ...grpc.CallOption) (*ArtifactTask, error)
+ // Creates multiple artifact-task relationships in bulk.
+ CreateArtifactTasksBulk(ctx context.Context, in *CreateArtifactTasksBulkRequest, opts ...grpc.CallOption) (*CreateArtifactTasksBulkResponse, error) +} + +type artifactServiceClient struct { + cc grpc.ClientConnInterface +} + +func NewArtifactServiceClient(cc grpc.ClientConnInterface) ArtifactServiceClient { + return &artifactServiceClient{cc} +} + +func (c *artifactServiceClient) ListArtifacts(ctx context.Context, in *ListArtifactRequest, opts ...grpc.CallOption) (*ListArtifactResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(ListArtifactResponse) + err := c.cc.Invoke(ctx, ArtifactService_ListArtifacts_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *artifactServiceClient) GetArtifact(ctx context.Context, in *GetArtifactRequest, opts ...grpc.CallOption) (*Artifact, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(Artifact) + err := c.cc.Invoke(ctx, ArtifactService_GetArtifact_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *artifactServiceClient) CreateArtifact(ctx context.Context, in *CreateArtifactRequest, opts ...grpc.CallOption) (*Artifact, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(Artifact) + err := c.cc.Invoke(ctx, ArtifactService_CreateArtifact_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *artifactServiceClient) CreateArtifactsBulk(ctx context.Context, in *CreateArtifactsBulkRequest, opts ...grpc.CallOption) (*CreateArtifactsBulkResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(CreateArtifactsBulkResponse) + err := c.cc.Invoke(ctx, ArtifactService_CreateArtifactsBulk_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *artifactServiceClient) ListArtifactTasks(ctx context.Context, in *ListArtifactTasksRequest, opts ...grpc.CallOption) (*ListArtifactTasksResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(ListArtifactTasksResponse) + err := c.cc.Invoke(ctx, ArtifactService_ListArtifactTasks_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *artifactServiceClient) CreateArtifactTask(ctx context.Context, in *CreateArtifactTaskRequest, opts ...grpc.CallOption) (*ArtifactTask, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(ArtifactTask) + err := c.cc.Invoke(ctx, ArtifactService_CreateArtifactTask_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *artifactServiceClient) CreateArtifactTasksBulk(ctx context.Context, in *CreateArtifactTasksBulkRequest, opts ...grpc.CallOption) (*CreateArtifactTasksBulkResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(CreateArtifactTasksBulkResponse) + err := c.cc.Invoke(ctx, ArtifactService_CreateArtifactTasksBulk_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ArtifactServiceServer is the server API for ArtifactService service. +// All implementations must embed UnimplementedArtifactServiceServer +// for forward compatibility. +type ArtifactServiceServer interface { + // Finds all artifacts within the specified namespace. 
+ ListArtifacts(context.Context, *ListArtifactRequest) (*ListArtifactResponse, error)
+ // Finds a specific Artifact by ID.
+ GetArtifact(context.Context, *GetArtifactRequest) (*Artifact, error)
+ // Creates a new artifact.
+ CreateArtifact(context.Context, *CreateArtifactRequest) (*Artifact, error)
+ CreateArtifactsBulk(context.Context, *CreateArtifactsBulkRequest) (*CreateArtifactsBulkResponse, error)
+ // List ArtifactTasks.
+ ListArtifactTasks(context.Context, *ListArtifactTasksRequest) (*ListArtifactTasksResponse, error)
+ // Creates an artifact-task relationship.
+ // While we always create an artifact-task link when an artifact is created,
+ // in the case of Importer we only create a link (and not an artifact)
+ // if Reimport = false.
+ CreateArtifactTask(context.Context, *CreateArtifactTaskRequest) (*ArtifactTask, error)
+ // Creates multiple artifact-task relationships in bulk.
+ CreateArtifactTasksBulk(context.Context, *CreateArtifactTasksBulkRequest) (*CreateArtifactTasksBulkResponse, error)
+ mustEmbedUnimplementedArtifactServiceServer()
+}
+
+// UnimplementedArtifactServiceServer must be embedded to have
+// forward compatible implementations.
+//
+// NOTE: this should be embedded by value instead of pointer to avoid a nil
+// pointer dereference when methods are called.
+type UnimplementedArtifactServiceServer struct{}
+
+func (UnimplementedArtifactServiceServer) ListArtifacts(context.Context, *ListArtifactRequest) (*ListArtifactResponse, error) {
+ return nil, status.Errorf(codes.Unimplemented, "method ListArtifacts not implemented")
+}
+func (UnimplementedArtifactServiceServer) GetArtifact(context.Context, *GetArtifactRequest) (*Artifact, error) {
+ return nil, status.Errorf(codes.Unimplemented, "method GetArtifact not implemented")
+}
+func (UnimplementedArtifactServiceServer) CreateArtifact(context.Context, *CreateArtifactRequest) (*Artifact, error) {
+ return nil, status.Errorf(codes.Unimplemented, "method CreateArtifact not implemented")
+}
+func (UnimplementedArtifactServiceServer) CreateArtifactsBulk(context.Context, *CreateArtifactsBulkRequest) (*CreateArtifactsBulkResponse, error) {
+ return nil, status.Errorf(codes.Unimplemented, "method CreateArtifactsBulk not implemented")
+}
+func (UnimplementedArtifactServiceServer) ListArtifactTasks(context.Context, *ListArtifactTasksRequest) (*ListArtifactTasksResponse, error) {
+ return nil, status.Errorf(codes.Unimplemented, "method ListArtifactTasks not implemented")
+}
+func (UnimplementedArtifactServiceServer) CreateArtifactTask(context.Context, *CreateArtifactTaskRequest) (*ArtifactTask, error) {
+ return nil, status.Errorf(codes.Unimplemented, "method CreateArtifactTask not implemented")
+}
+func (UnimplementedArtifactServiceServer) CreateArtifactTasksBulk(context.Context, *CreateArtifactTasksBulkRequest) (*CreateArtifactTasksBulkResponse, error) {
+ return nil, status.Errorf(codes.Unimplemented, "method CreateArtifactTasksBulk not implemented")
+}
+func (UnimplementedArtifactServiceServer) mustEmbedUnimplementedArtifactServiceServer() {}
+func (UnimplementedArtifactServiceServer) testEmbeddedByValue() {}
+
+// UnsafeArtifactServiceServer may be embedded to opt out of forward compatibility for this service.
+// Use of this interface is not recommended, as added methods to ArtifactServiceServer will
+// result in compilation errors.
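+// Illustrative sketch (an assumption, not part of the generated file): a
+// server implementation embeds UnimplementedArtifactServiceServer by value so
+// that any method it does not override returns codes.Unimplemented, and then
+// overrides only what it needs. The stub body below is a placeholder, not
+// KFP's real implementation.
+//
+//	type artifactServer struct {
+//		UnimplementedArtifactServiceServer
+//	}
+//
+//	func (s *artifactServer) GetArtifact(ctx context.Context, in *GetArtifactRequest) (*Artifact, error) {
+//		return &Artifact{}, nil // placeholder response
+//	}
+//
+//	// Registration: RegisterArtifactServiceServer(grpcServer, &artifactServer{})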
+type UnsafeArtifactServiceServer interface {
+ mustEmbedUnimplementedArtifactServiceServer()
+}
+
+func RegisterArtifactServiceServer(s grpc.ServiceRegistrar, srv ArtifactServiceServer) {
+ // If the following call panics, it indicates UnimplementedArtifactServiceServer was
+ // embedded by pointer and is nil. This will cause panics if an
+ // unimplemented method is ever invoked, so we test this at initialization
+ // time to prevent it from happening at runtime later due to I/O.
+ if t, ok := srv.(interface{ testEmbeddedByValue() }); ok {
+ t.testEmbeddedByValue()
+ }
+ s.RegisterService(&ArtifactService_ServiceDesc, srv)
+}
+
+func _ArtifactService_ListArtifacts_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+ in := new(ListArtifactRequest)
+ if err := dec(in); err != nil {
+ return nil, err
+ }
+ if interceptor == nil {
+ return srv.(ArtifactServiceServer).ListArtifacts(ctx, in)
+ }
+ info := &grpc.UnaryServerInfo{
+ Server: srv,
+ FullMethod: ArtifactService_ListArtifacts_FullMethodName,
+ }
+ handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+ return srv.(ArtifactServiceServer).ListArtifacts(ctx, req.(*ListArtifactRequest))
+ }
+ return interceptor(ctx, in, info, handler)
+}
+
+func _ArtifactService_GetArtifact_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+ in := new(GetArtifactRequest)
+ if err := dec(in); err != nil {
+ return nil, err
+ }
+ if interceptor == nil {
+ return srv.(ArtifactServiceServer).GetArtifact(ctx, in)
+ }
+ info := &grpc.UnaryServerInfo{
+ Server: srv,
+ FullMethod: ArtifactService_GetArtifact_FullMethodName,
+ }
+ handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+ return srv.(ArtifactServiceServer).GetArtifact(ctx, req.(*GetArtifactRequest))
+ }
+ return interceptor(ctx, in, info, handler)
+}
+
+func _ArtifactService_CreateArtifact_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+ in := new(CreateArtifactRequest)
+ if err := dec(in); err != nil {
+ return nil, err
+ }
+ if interceptor == nil {
+ return srv.(ArtifactServiceServer).CreateArtifact(ctx, in)
+ }
+ info := &grpc.UnaryServerInfo{
+ Server: srv,
+ FullMethod: ArtifactService_CreateArtifact_FullMethodName,
+ }
+ handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+ return srv.(ArtifactServiceServer).CreateArtifact(ctx, req.(*CreateArtifactRequest))
+ }
+ return interceptor(ctx, in, info, handler)
+}
+
+func _ArtifactService_CreateArtifactsBulk_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+ in := new(CreateArtifactsBulkRequest)
+ if err := dec(in); err != nil {
+ return nil, err
+ }
+ if interceptor == nil {
+ return srv.(ArtifactServiceServer).CreateArtifactsBulk(ctx, in)
+ }
+ info := &grpc.UnaryServerInfo{
+ Server: srv,
+ FullMethod: ArtifactService_CreateArtifactsBulk_FullMethodName,
+ }
+ handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+ return srv.(ArtifactServiceServer).CreateArtifactsBulk(ctx, req.(*CreateArtifactsBulkRequest))
+ }
+ return interceptor(ctx, in, info, handler)
+}
+
+func _ArtifactService_ListArtifactTasks_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+ in := new(ListArtifactTasksRequest)
+ if err := dec(in); err != nil {
+ return nil, err
+ }
+ if interceptor == nil {
+ return srv.(ArtifactServiceServer).ListArtifactTasks(ctx, in)
+ }
+ info := &grpc.UnaryServerInfo{
+ Server: srv,
+ FullMethod: ArtifactService_ListArtifactTasks_FullMethodName,
+ }
+ handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+ return srv.(ArtifactServiceServer).ListArtifactTasks(ctx, req.(*ListArtifactTasksRequest))
+ }
+ return interceptor(ctx, in, info, handler)
+}
+
+func _ArtifactService_CreateArtifactTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+ in := new(CreateArtifactTaskRequest)
+ if err := dec(in); err != nil {
+ return nil, err
+ }
+ if interceptor == nil {
+ return srv.(ArtifactServiceServer).CreateArtifactTask(ctx, in)
+ }
+ info := &grpc.UnaryServerInfo{
+ Server: srv,
+ FullMethod: ArtifactService_CreateArtifactTask_FullMethodName,
+ }
+ handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+ return srv.(ArtifactServiceServer).CreateArtifactTask(ctx, req.(*CreateArtifactTaskRequest))
+ }
+ return interceptor(ctx, in, info, handler)
+}
+
+func _ArtifactService_CreateArtifactTasksBulk_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+ in := new(CreateArtifactTasksBulkRequest)
+ if err := dec(in); err != nil {
+ return nil, err
+ }
+ if interceptor == nil {
+ return srv.(ArtifactServiceServer).CreateArtifactTasksBulk(ctx, in)
+ }
+ info := &grpc.UnaryServerInfo{
+ Server: srv,
+ FullMethod: ArtifactService_CreateArtifactTasksBulk_FullMethodName,
+ }
+ handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+ return srv.(ArtifactServiceServer).CreateArtifactTasksBulk(ctx, req.(*CreateArtifactTasksBulkRequest))
+ }
+ return interceptor(ctx, in, info, handler)
+}
+
+// ArtifactService_ServiceDesc is the grpc.ServiceDesc for ArtifactService service.
+// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var ArtifactService_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "kubeflow.pipelines.backend.api.v2beta1.ArtifactService", + HandlerType: (*ArtifactServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListArtifacts", + Handler: _ArtifactService_ListArtifacts_Handler, + }, + { + MethodName: "GetArtifact", + Handler: _ArtifactService_GetArtifact_Handler, + }, + { + MethodName: "CreateArtifact", + Handler: _ArtifactService_CreateArtifact_Handler, + }, + { + MethodName: "CreateArtifactsBulk", + Handler: _ArtifactService_CreateArtifactsBulk_Handler, + }, + { + MethodName: "ListArtifactTasks", + Handler: _ArtifactService_ListArtifactTasks_Handler, + }, + { + MethodName: "CreateArtifactTask", + Handler: _ArtifactService_CreateArtifactTask_Handler, + }, + { + MethodName: "CreateArtifactTasksBulk", + Handler: _ArtifactService_CreateArtifactTasksBulk_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "backend/api/v2beta1/artifact.proto", +} diff --git a/backend/api/v2beta1/go_client/run.pb.go b/backend/api/v2beta1/go_client/run.pb.go index 5e040d1cd36..38aa3eec55f 100644 --- a/backend/api/v2beta1/go_client/run.pb.go +++ b/backend/api/v2beta1/go_client/run.pb.go @@ -172,6 +172,296 @@ func (Run_StorageState) EnumDescriptor() ([]byte, []int) { return file_backend_api_v2beta1_run_proto_rawDescGZIP(), []int{0, 0} } +type PipelineTaskDetail_TaskPodType int32 + +const ( + PipelineTaskDetail_UNSPECIFIED PipelineTaskDetail_TaskPodType = 0 + PipelineTaskDetail_DRIVER PipelineTaskDetail_TaskPodType = 1 + PipelineTaskDetail_EXECUTOR PipelineTaskDetail_TaskPodType = 2 +) + +// Enum value maps for PipelineTaskDetail_TaskPodType. +var ( + PipelineTaskDetail_TaskPodType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "DRIVER", + 2: "EXECUTOR", + } + PipelineTaskDetail_TaskPodType_value = map[string]int32{ + "UNSPECIFIED": 0, + "DRIVER": 1, + "EXECUTOR": 2, + } +) + +func (x PipelineTaskDetail_TaskPodType) Enum() *PipelineTaskDetail_TaskPodType { + p := new(PipelineTaskDetail_TaskPodType) + *p = x + return p +} + +func (x PipelineTaskDetail_TaskPodType) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (PipelineTaskDetail_TaskPodType) Descriptor() protoreflect.EnumDescriptor { + return file_backend_api_v2beta1_run_proto_enumTypes[2].Descriptor() +} + +func (PipelineTaskDetail_TaskPodType) Type() protoreflect.EnumType { + return &file_backend_api_v2beta1_run_proto_enumTypes[2] +} + +func (x PipelineTaskDetail_TaskPodType) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use PipelineTaskDetail_TaskPodType.Descriptor instead. +func (PipelineTaskDetail_TaskPodType) EnumDescriptor() ([]byte, []int) { + return file_backend_api_v2beta1_run_proto_rawDescGZIP(), []int{4, 0} +} + +// Runtime state of a Task +type PipelineTaskDetail_TaskState int32 + +const ( + // Default value. This value is not used. + PipelineTaskDetail_RUNTIME_STATE_UNSPECIFIED PipelineTaskDetail_TaskState = 0 + // Entity execution is in progress. + PipelineTaskDetail_RUNNING PipelineTaskDetail_TaskState = 1 + // Entity completed successfully. + PipelineTaskDetail_SUCCEEDED PipelineTaskDetail_TaskState = 2 + // Entity has been skipped. For example, due to caching. + PipelineTaskDetail_SKIPPED PipelineTaskDetail_TaskState = 3 + // Entity execution has failed. 
+ PipelineTaskDetail_FAILED PipelineTaskDetail_TaskState = 4
+ PipelineTaskDetail_CACHED PipelineTaskDetail_TaskState = 5
+)
+
+// Enum value maps for PipelineTaskDetail_TaskState.
+var (
+ PipelineTaskDetail_TaskState_name = map[int32]string{
+ 0: "RUNTIME_STATE_UNSPECIFIED",
+ 1: "RUNNING",
+ 2: "SUCCEEDED",
+ 3: "SKIPPED",
+ 4: "FAILED",
+ 5: "CACHED",
+ }
+ PipelineTaskDetail_TaskState_value = map[string]int32{
+ "RUNTIME_STATE_UNSPECIFIED": 0,
+ "RUNNING": 1,
+ "SUCCEEDED": 2,
+ "SKIPPED": 3,
+ "FAILED": 4,
+ "CACHED": 5,
+ }
+)
+
+func (x PipelineTaskDetail_TaskState) Enum() *PipelineTaskDetail_TaskState {
+ p := new(PipelineTaskDetail_TaskState)
+ *p = x
+ return p
+}
+
+func (x PipelineTaskDetail_TaskState) String() string {
+ return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
+}
+
+func (PipelineTaskDetail_TaskState) Descriptor() protoreflect.EnumDescriptor {
+ return file_backend_api_v2beta1_run_proto_enumTypes[3].Descriptor()
+}
+
+func (PipelineTaskDetail_TaskState) Type() protoreflect.EnumType {
+ return &file_backend_api_v2beta1_run_proto_enumTypes[3]
+}
+
+func (x PipelineTaskDetail_TaskState) Number() protoreflect.EnumNumber {
+ return protoreflect.EnumNumber(x)
+}
+
+// Deprecated: Use PipelineTaskDetail_TaskState.Descriptor instead.
+func (PipelineTaskDetail_TaskState) EnumDescriptor() ([]byte, []int) {
+ return file_backend_api_v2beta1_run_proto_rawDescGZIP(), []int{4, 1}
+}
+
+type PipelineTaskDetail_TaskType int32
+
+const (
+ // Root task is the top ancestor task to all tasks in the pipeline run.
+ // It is the only task with no parent task in a Pipeline Run.
+ PipelineTaskDetail_ROOT PipelineTaskDetail_TaskType = 0
+ // All child tasks in the Run DAG are Runtime tasks, with the exception
+ // of K8S driver pods.
+ // These tasks are the only tasks that have Executor Pods.
+ PipelineTaskDetail_RUNTIME PipelineTaskDetail_TaskType = 1
+ // Condition Branch is the wrapper task of an If block
+ PipelineTaskDetail_CONDITION_BRANCH PipelineTaskDetail_TaskType = 2
+ // Condition is an individual "if" branch, and is
+ // a child of a CONDITION_BRANCH task.
+ PipelineTaskDetail_CONDITION PipelineTaskDetail_TaskType = 3
+ // Task Group for CONDITION_BRANCH
+ // Task Group for RUNTIME Loop Iterations
+ PipelineTaskDetail_LOOP PipelineTaskDetail_TaskType = 4
+ PipelineTaskDetail_EXIT_HANDLER PipelineTaskDetail_TaskType = 5
+ PipelineTaskDetail_IMPORTER PipelineTaskDetail_TaskType = 6
+ // Generic DAG task type for types like Nested Pipelines
+ // where there is no declarative way to detect this within
+ // a driver.
+ PipelineTaskDetail_DAG PipelineTaskDetail_TaskType = 7
+)
+
+// Enum value maps for PipelineTaskDetail_TaskType.
+var (
+ PipelineTaskDetail_TaskType_name = map[int32]string{
+ 0: "ROOT",
+ 1: "RUNTIME",
+ 2: "CONDITION_BRANCH",
+ 3: "CONDITION",
+ 4: "LOOP",
+ 5: "EXIT_HANDLER",
+ 6: "IMPORTER",
+ 7: "DAG",
+ }
+ PipelineTaskDetail_TaskType_value = map[string]int32{
+ "ROOT": 0,
+ "RUNTIME": 1,
+ "CONDITION_BRANCH": 2,
+ "CONDITION": 3,
+ "LOOP": 4,
+ "EXIT_HANDLER": 5,
+ "IMPORTER": 6,
+ "DAG": 7,
+ }
+)
+
+func (x PipelineTaskDetail_TaskType) Enum() *PipelineTaskDetail_TaskType {
+ p := new(PipelineTaskDetail_TaskType)
+ *p = x
+ return p
+}
+
+func (x PipelineTaskDetail_TaskType) String() string {
+ return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
+}
+
+func (PipelineTaskDetail_TaskType) Descriptor() protoreflect.EnumDescriptor {
+ return file_backend_api_v2beta1_run_proto_enumTypes[4].Descriptor()
+}
+
+func (PipelineTaskDetail_TaskType) Type() protoreflect.EnumType {
+ return &file_backend_api_v2beta1_run_proto_enumTypes[4]
+}
+
+func (x PipelineTaskDetail_TaskType) Number() protoreflect.EnumNumber {
+ return protoreflect.EnumNumber(x)
+}
+
+// Deprecated: Use PipelineTaskDetail_TaskType.Descriptor instead.
+func (PipelineTaskDetail_TaskType) EnumDescriptor() ([]byte, []int) {
+ return file_backend_api_v2beta1_run_proto_rawDescGZIP(), []int{4, 2}
+}
+
+type GetRunRequest_ViewMode int32
+
+const (
+ // By default the `tasks` field is omitted.
+ // This provides a faster and leaner run object.
+ GetRunRequest_DEFAULT GetRunRequest_ViewMode = 0
+ // This view mode displays all the tasks for this run
+ // with all their fields populated.
+ GetRunRequest_FULL GetRunRequest_ViewMode = 1
+)
+
+// Enum value maps for GetRunRequest_ViewMode.
+var (
+ GetRunRequest_ViewMode_name = map[int32]string{
+ 0: "DEFAULT",
+ 1: "FULL",
+ }
+ GetRunRequest_ViewMode_value = map[string]int32{
+ "DEFAULT": 0,
+ "FULL": 1,
+ }
+)
+
+func (x GetRunRequest_ViewMode) Enum() *GetRunRequest_ViewMode {
+ p := new(GetRunRequest_ViewMode)
+ *p = x
+ return p
+}
+
+func (x GetRunRequest_ViewMode) String() string {
+ return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
+}
+
+func (GetRunRequest_ViewMode) Descriptor() protoreflect.EnumDescriptor {
+ return file_backend_api_v2beta1_run_proto_enumTypes[5].Descriptor()
+}
+
+func (GetRunRequest_ViewMode) Type() protoreflect.EnumType {
+ return &file_backend_api_v2beta1_run_proto_enumTypes[5]
+}
+
+func (x GetRunRequest_ViewMode) Number() protoreflect.EnumNumber {
+ return protoreflect.EnumNumber(x)
+}
+
+// Deprecated: Use GetRunRequest_ViewMode.Descriptor instead.
+func (GetRunRequest_ViewMode) EnumDescriptor() ([]byte, []int) {
+ return file_backend_api_v2beta1_run_proto_rawDescGZIP(), []int{8, 0}
+}
+
+type ListRunsRequest_ViewMode int32
+
+const (
+ // By default the `tasks` field is omitted.
+ // This provides a faster and leaner run object.
+ ListRunsRequest_DEFAULT ListRunsRequest_ViewMode = 0
+ // This view mode displays all the tasks for this run
+ // with all their fields populated.
+ ListRunsRequest_FULL ListRunsRequest_ViewMode = 1
+)
+
+// Enum value maps for ListRunsRequest_ViewMode.
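+// For illustration (a sketch, not generated code): the generated name/value
+// maps and String method allow round-tripping enum values, e.g. for the task
+// state enum declared above:
+//
+//	s := PipelineTaskDetail_SUCCEEDED.String() // "SUCCEEDED"
+//	v := PipelineTaskDetail_TaskState(PipelineTaskDetail_TaskState_value[s])
+//	// v == PipelineTaskDetail_SUCCEEDED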
+var ( + ListRunsRequest_ViewMode_name = map[int32]string{ + 0: "DEFAULT", + 1: "FULL", + } + ListRunsRequest_ViewMode_value = map[string]int32{ + "DEFAULT": 0, + "FULL": 1, + } +) + +func (x ListRunsRequest_ViewMode) Enum() *ListRunsRequest_ViewMode { + p := new(ListRunsRequest_ViewMode) + *p = x + return p +} + +func (x ListRunsRequest_ViewMode) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (ListRunsRequest_ViewMode) Descriptor() protoreflect.EnumDescriptor { + return file_backend_api_v2beta1_run_proto_enumTypes[6].Descriptor() +} + +func (ListRunsRequest_ViewMode) Type() protoreflect.EnumType { + return &file_backend_api_v2beta1_run_proto_enumTypes[6] +} + +func (x ListRunsRequest_ViewMode) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use ListRunsRequest_ViewMode.Descriptor instead. +func (ListRunsRequest_ViewMode) EnumDescriptor() ([]byte, []int) { + return file_backend_api_v2beta1_run_proto_rawDescGZIP(), []int{9, 0} +} + type Run struct { state protoimpl.MessageState `protogen:"open.v1"` // Input. ID of the parent experiment. @@ -214,14 +504,21 @@ type Run struct { // how to handle the error. This is especially useful during listing call. Error *status.Status `protobuf:"bytes,14,opt,name=error,proto3" json:"error,omitempty"` // Output. Runtime details of a run. + // Either remove or deprecate this + // + // Deprecated: Marked as deprecated in backend/api/v2beta1/run.proto. RunDetails *RunDetails `protobuf:"bytes,15,opt,name=run_details,json=runDetails,proto3" json:"run_details,omitempty"` // ID of the recurring run that triggered this run. RecurringRunId string `protobuf:"bytes,16,opt,name=recurring_run_id,json=recurringRunId,proto3" json:"recurring_run_id,omitempty"` // Output. A sequence of run statuses. This field keeps a record // of state transitions. - StateHistory []*RuntimeStatus `protobuf:"bytes,17,rep,name=state_history,json=stateHistory,proto3" json:"state_history,omitempty"` - unknownFields protoimpl.UnknownFields - sizeCache protoimpl.SizeCache + StateHistory []*RuntimeStatus `protobuf:"bytes,17,rep,name=state_history,json=stateHistory,proto3" json:"state_history,omitempty"` + // Output only. Reference to the pipeline used for this run. + PipelineReference *PipelineVersionReference `protobuf:"bytes,19,opt,name=pipeline_reference,json=pipelineReference,proto3" json:"pipeline_reference,omitempty"` + TaskCount int32 `protobuf:"varint,20,opt,name=task_count,json=taskCount,proto3" json:"task_count,omitempty"` + Tasks []*PipelineTaskDetail `protobuf:"bytes,21,rep,name=tasks,proto3" json:"tasks,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *Run) Reset() { @@ -373,6 +670,7 @@ func (x *Run) GetError() *status.Status { return nil } +// Deprecated: Marked as deprecated in backend/api/v2beta1/run.proto. 
func (x *Run) GetRunDetails() *RunDetails { if x != nil { return x.RunDetails @@ -394,6 +692,27 @@ func (x *Run) GetStateHistory() []*RuntimeStatus { return nil } +func (x *Run) GetPipelineReference() *PipelineVersionReference { + if x != nil { + return x.PipelineReference + } + return nil +} + +func (x *Run) GetTaskCount() int32 { + if x != nil { + return x.TaskCount + } + return 0 +} + +func (x *Run) GetTasks() []*PipelineTaskDetail { + if x != nil { + return x.Tasks + } + return nil +} + type isRun_PipelineSource interface { isRun_PipelineSource() } @@ -611,43 +930,45 @@ func (x *RunDetails) GetTaskDetails() []*PipelineTaskDetail { // Runtime information of a task execution. type PipelineTaskDetail struct { state protoimpl.MessageState `protogen:"open.v1"` - // ID of the parent run. - RunId string `protobuf:"bytes,1,opt,name=run_id,json=runId,proto3" json:"run_id,omitempty"` - // System-generated ID of a task. - TaskId string `protobuf:"bytes,2,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` // User specified name of a task that is defined in // [Pipeline.spec][]. - DisplayName string `protobuf:"bytes,3,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // System-generated ID of a task. + TaskId string `protobuf:"bytes,3,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"` + // ID of the parent run. + RunId string `protobuf:"bytes,4,opt,name=run_id,json=runId,proto3" json:"run_id,omitempty"` + Pods []*PipelineTaskDetail_TaskPod `protobuf:"bytes,5,rep,name=pods,proto3" json:"pods,omitempty"` + CacheFingerprint string `protobuf:"bytes,6,opt,name=cache_fingerprint,json=cacheFingerprint,proto3" json:"cache_fingerprint,omitempty"` // Creation time of a task. - CreateTime *timestamppb.Timestamp `protobuf:"bytes,4,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + CreateTime *timestamppb.Timestamp `protobuf:"bytes,7,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` // Starting time of a task. - StartTime *timestamppb.Timestamp `protobuf:"bytes,5,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + StartTime *timestamppb.Timestamp `protobuf:"bytes,8,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` // Completion time of a task. - EndTime *timestamppb.Timestamp `protobuf:"bytes,6,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` - // Execution information of a task. - ExecutorDetail *PipelineTaskExecutorDetail `protobuf:"bytes,7,opt,name=executor_detail,json=executorDetail,proto3" json:"executor_detail,omitempty"` - // Runtime state of a task. - State RuntimeState `protobuf:"varint,8,opt,name=state,proto3,enum=kubeflow.pipelines.backend.api.v2beta1.RuntimeState" json:"state,omitempty"` - // Execution id of the corresponding entry in ML metadata store. 
- ExecutionId int64 `protobuf:"varint,9,opt,name=execution_id,json=executionId,proto3" json:"execution_id,omitempty"`
+ EndTime *timestamppb.Timestamp `protobuf:"bytes,9,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"`
+ State PipelineTaskDetail_TaskState `protobuf:"varint,10,opt,name=state,proto3,enum=kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail_TaskState" json:"state,omitempty"`
+ StatusMetadata *PipelineTaskDetail_StatusMetadata `protobuf:"bytes,11,opt,name=status_metadata,json=statusMetadata,proto3" json:"status_metadata,omitempty"`
+ // A sequence of task statuses. This field keeps a record
+ // of state transitions.
+ StateHistory []*PipelineTaskDetail_TaskStatus `protobuf:"bytes,12,rep,name=state_history,json=stateHistory,proto3" json:"state_history,omitempty"`
+ Type PipelineTaskDetail_TaskType `protobuf:"varint,13,opt,name=type,proto3,enum=kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail_TaskType" json:"type,omitempty"`
+ TypeAttributes *PipelineTaskDetail_TypeAttributes `protobuf:"bytes,14,opt,name=type_attributes,json=typeAttributes,proto3" json:"type_attributes,omitempty"`
 // The error that occurred during task execution.
 // Only populated when the task is in FAILED or CANCELED state.
- Error *status.Status `protobuf:"bytes,10,opt,name=error,proto3" json:"error,omitempty"`
- // Input artifacts of the task.
- Inputs map[string]*ArtifactList `protobuf:"bytes,11,rep,name=inputs,proto3" json:"inputs,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
- // Output artifacts of the task.
- Outputs map[string]*ArtifactList `protobuf:"bytes,12,rep,name=outputs,proto3" json:"outputs,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
+ Error *status.Status `protobuf:"bytes,15,opt,name=error,proto3" json:"error,omitempty"`
 // ID of the parent task if the task is within a component scope.
 // Empty if the task is at the root level.
- ParentTaskId string `protobuf:"bytes,13,opt,name=parent_task_id,json=parentTaskId,proto3" json:"parent_task_id,omitempty"`
- // A sequence of task statuses. This field keeps a record
- // of state transitions.
- StateHistory []*RuntimeStatus `protobuf:"bytes,14,rep,name=state_history,json=stateHistory,proto3" json:"state_history,omitempty"`
- // Name of the corresponding pod assigned by the orchestration engine.
- // Also known as node_id.
- PodName string `protobuf:"bytes,15,opt,name=pod_name,json=podName,proto3" json:"pod_name,omitempty"`
- // Sequence of dependen tasks.
- ChildTasks []*PipelineTaskDetail_ChildTask `protobuf:"bytes,16,rep,name=child_tasks,json=childTasks,proto3" json:"child_tasks,omitempty"`
+ ParentTaskId *string `protobuf:"bytes,16,opt,name=parent_task_id,json=parentTaskId,proto3,oneof" json:"parent_task_id,omitempty"`
+ // Sequence of dependent tasks.
+ ChildTasks []*PipelineTaskDetail_ChildTask `protobuf:"bytes,17,rep,name=child_tasks,json=childTasks,proto3" json:"child_tasks,omitempty"`
+ Inputs *PipelineTaskDetail_InputOutputs `protobuf:"bytes,18,opt,name=inputs,proto3" json:"inputs,omitempty"`
+ Outputs *PipelineTaskDetail_InputOutputs `protobuf:"bytes,19,opt,name=outputs,proto3" json:"outputs,omitempty"`
+ // The scope of this task within the
+ // pipeline spec. Each entry represents
+ // either a DAG task or a Container task.
+ // Note that Container tasks are
+ // always the last entry in a scope_path.
+ ScopePath []string `protobuf:"bytes,20,rep,name=scope_path,json=scopePath,proto3" json:"scope_path,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -682,9 +1003,16 @@ func (*PipelineTaskDetail) Descriptor() ([]byte, []int) { return file_backend_api_v2beta1_run_proto_rawDescGZIP(), []int{4} } -func (x *PipelineTaskDetail) GetRunId() string { +func (x *PipelineTaskDetail) GetName() string { if x != nil { - return x.RunId + return x.Name + } + return "" +} + +func (x *PipelineTaskDetail) GetDisplayName() string { + if x != nil { + return x.DisplayName } return "" } @@ -696,9 +1024,23 @@ func (x *PipelineTaskDetail) GetTaskId() string { return "" } -func (x *PipelineTaskDetail) GetDisplayName() string { +func (x *PipelineTaskDetail) GetRunId() string { if x != nil { - return x.DisplayName + return x.RunId + } + return "" +} + +func (x *PipelineTaskDetail) GetPods() []*PipelineTaskDetail_TaskPod { + if x != nil { + return x.Pods + } + return nil +} + +func (x *PipelineTaskDetail) GetCacheFingerprint() string { + if x != nil { + return x.CacheFingerprint } return "" } @@ -724,72 +1066,79 @@ func (x *PipelineTaskDetail) GetEndTime() *timestamppb.Timestamp { return nil } -func (x *PipelineTaskDetail) GetExecutorDetail() *PipelineTaskExecutorDetail { +func (x *PipelineTaskDetail) GetState() PipelineTaskDetail_TaskState { if x != nil { - return x.ExecutorDetail + return x.State } - return nil + return PipelineTaskDetail_RUNTIME_STATE_UNSPECIFIED } -func (x *PipelineTaskDetail) GetState() RuntimeState { +func (x *PipelineTaskDetail) GetStatusMetadata() *PipelineTaskDetail_StatusMetadata { if x != nil { - return x.State + return x.StatusMetadata } - return RuntimeState_RUNTIME_STATE_UNSPECIFIED + return nil } -func (x *PipelineTaskDetail) GetExecutionId() int64 { +func (x *PipelineTaskDetail) GetStateHistory() []*PipelineTaskDetail_TaskStatus { if x != nil { - return x.ExecutionId + return x.StateHistory } - return 0 + return nil } -func (x *PipelineTaskDetail) GetError() *status.Status { +func (x *PipelineTaskDetail) GetType() PipelineTaskDetail_TaskType { if x != nil { - return x.Error + return x.Type } - return nil + return PipelineTaskDetail_ROOT } -func (x *PipelineTaskDetail) GetInputs() map[string]*ArtifactList { +func (x *PipelineTaskDetail) GetTypeAttributes() *PipelineTaskDetail_TypeAttributes { if x != nil { - return x.Inputs + return x.TypeAttributes } return nil } -func (x *PipelineTaskDetail) GetOutputs() map[string]*ArtifactList { +func (x *PipelineTaskDetail) GetError() *status.Status { if x != nil { - return x.Outputs + return x.Error } return nil } func (x *PipelineTaskDetail) GetParentTaskId() string { - if x != nil { - return x.ParentTaskId + if x != nil && x.ParentTaskId != nil { + return *x.ParentTaskId } return "" } -func (x *PipelineTaskDetail) GetStateHistory() []*RuntimeStatus { +func (x *PipelineTaskDetail) GetChildTasks() []*PipelineTaskDetail_ChildTask { if x != nil { - return x.StateHistory + return x.ChildTasks } return nil } -func (x *PipelineTaskDetail) GetPodName() string { +func (x *PipelineTaskDetail) GetInputs() *PipelineTaskDetail_InputOutputs { if x != nil { - return x.PodName + return x.Inputs } - return "" + return nil } -func (x *PipelineTaskDetail) GetChildTasks() []*PipelineTaskDetail_ChildTask { +func (x *PipelineTaskDetail) GetOutputs() *PipelineTaskDetail_InputOutputs { if x != nil { - return x.ChildTasks + return x.Outputs + } + return nil +} + +func (x *PipelineTaskDetail) GetScopePath() []string { + if x != nil { + 
 return x.ScopePath
 }
 return nil
}
@@ -985,7 +1334,10 @@ type GetRunRequest struct {
 // Deprecated: Marked as deprecated in backend/api/v2beta1/run.proto.
 ExperimentId string `protobuf:"bytes,1,opt,name=experiment_id,json=experimentId,proto3" json:"experiment_id,omitempty"`
 // The ID of the run to be retrieved.
- RunId string `protobuf:"bytes,2,opt,name=run_id,json=runId,proto3" json:"run_id,omitempty"`
+ RunId string `protobuf:"bytes,2,opt,name=run_id,json=runId,proto3" json:"run_id,omitempty"`
+ // Optional view mode. This field can be used to adjust
+ // how detailed the returned Run object will be.
+ View *GetRunRequest_ViewMode `protobuf:"varint,3,opt,name=view,proto3,enum=kubeflow.pipelines.backend.api.v2beta1.GetRunRequest_ViewMode,oneof" json:"view,omitempty"`
 unknownFields protoimpl.UnknownFields
 sizeCache protoimpl.SizeCache
}
@@ -1035,6 +1387,13 @@ func (x *GetRunRequest) GetRunId() string {
 return ""
}
+func (x *GetRunRequest) GetView() GetRunRequest_ViewMode {
+ if x != nil && x.View != nil {
+ return *x.View
+ }
+ return GetRunRequest_DEFAULT
+}
+
 type ListRunsRequest struct {
 state protoimpl.MessageState `protogen:"open.v1"`
 // Optional input field. Filters based on the namespace.
@@ -1054,7 +1413,10 @@ type ListRunsRequest struct {
 SortBy string `protobuf:"bytes,5,opt,name=sort_by,json=sortBy,proto3" json:"sort_by,omitempty"`
 // A url-encoded, JSON-serialized Filter protocol buffer (see
 // [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/filter.proto)).
- Filter string `protobuf:"bytes,6,opt,name=filter,proto3" json:"filter,omitempty"`
+ Filter string `protobuf:"bytes,6,opt,name=filter,proto3" json:"filter,omitempty"`
+ // Optional view mode. This field can be used to adjust
+ // how detailed the returned Run object will be.
+ View *ListRunsRequest_ViewMode `protobuf:"varint,7,opt,name=view,proto3,enum=kubeflow.pipelines.backend.api.v2beta1.ListRunsRequest_ViewMode,oneof" json:"view,omitempty"`
 unknownFields protoimpl.UnknownFields
 sizeCache protoimpl.SizeCache
}
@@ -1131,6 +1493,13 @@ func (x *ListRunsRequest) GetFilter() string {
 return ""
}
+func (x *ListRunsRequest) GetView() ListRunsRequest_ViewMode {
+ if x != nil && x.View != nil {
+ return *x.View
+ }
+ return ListRunsRequest_DEFAULT
+}
+
 type TerminateRunRequest struct {
 state protoimpl.MessageState `protogen:"open.v1"`
 // The ID of the parent experiment.
@@ -1599,34 +1968,28 @@ func (x *RetryRunRequest) GetRunId() string {
 return ""
}
-// A dependent task that requires this one to succeed.
-// Represented by either task_id or pod_name.
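+// Sketch of the new optional view modes (illustrative; "my-run-id" is a
+// placeholder): because View is a proto3 optional field, it is set through a
+// pointer, and GetView falls back to DEFAULT when unset.
+//
+//	view := GetRunRequest_FULL
+//	req := &GetRunRequest{RunId: "my-run-id", View: &view}
+//	// req.GetView() == GetRunRequest_FULL; with DEFAULT the run's tasks are omitted.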
-type PipelineTaskDetail_ChildTask struct { - state protoimpl.MessageState `protogen:"open.v1"` - // Types that are valid to be assigned to ChildTask: - // - // *PipelineTaskDetail_ChildTask_TaskId - // *PipelineTaskDetail_ChildTask_PodName - ChildTask isPipelineTaskDetail_ChildTask_ChildTask `protobuf_oneof:"child_task"` +type CreateTaskRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + Task *PipelineTaskDetail `protobuf:"bytes,1,opt,name=task,proto3" json:"task,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } -func (x *PipelineTaskDetail_ChildTask) Reset() { - *x = PipelineTaskDetail_ChildTask{} - mi := &file_backend_api_v2beta1_run_proto_msgTypes[20] +func (x *CreateTaskRequest) Reset() { + *x = CreateTaskRequest{} + mi := &file_backend_api_v2beta1_run_proto_msgTypes[18] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } -func (x *PipelineTaskDetail_ChildTask) String() string { +func (x *CreateTaskRequest) String() string { return protoimpl.X.MessageStringOf(x) } -func (*PipelineTaskDetail_ChildTask) ProtoMessage() {} +func (*CreateTaskRequest) ProtoMessage() {} -func (x *PipelineTaskDetail_ChildTask) ProtoReflect() protoreflect.Message { - mi := &file_backend_api_v2beta1_run_proto_msgTypes[20] +func (x *CreateTaskRequest) ProtoReflect() protoreflect.Message { + mi := &file_backend_api_v2beta1_run_proto_msgTypes[18] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1637,60 +2000,890 @@ func (x *PipelineTaskDetail_ChildTask) ProtoReflect() protoreflect.Message { return mi.MessageOf(x) } -// Deprecated: Use PipelineTaskDetail_ChildTask.ProtoReflect.Descriptor instead. -func (*PipelineTaskDetail_ChildTask) Descriptor() ([]byte, []int) { - return file_backend_api_v2beta1_run_proto_rawDescGZIP(), []int{4, 2} +// Deprecated: Use CreateTaskRequest.ProtoReflect.Descriptor instead. +func (*CreateTaskRequest) Descriptor() ([]byte, []int) { + return file_backend_api_v2beta1_run_proto_rawDescGZIP(), []int{18} } -func (x *PipelineTaskDetail_ChildTask) GetChildTask() isPipelineTaskDetail_ChildTask_ChildTask { +func (x *CreateTaskRequest) GetTask() *PipelineTaskDetail { if x != nil { - return x.ChildTask + return x.Task } return nil } -func (x *PipelineTaskDetail_ChildTask) GetTaskId() string { - if x != nil { - if x, ok := x.ChildTask.(*PipelineTaskDetail_ChildTask_TaskId); ok { - return x.TaskId - } - } - return "" +type UpdateTaskRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + TaskId string `protobuf:"bytes,1,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"` + Task *PipelineTaskDetail `protobuf:"bytes,2,opt,name=task,proto3" json:"task,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } -func (x *PipelineTaskDetail_ChildTask) GetPodName() string { - if x != nil { - if x, ok := x.ChildTask.(*PipelineTaskDetail_ChildTask_PodName); ok { - return x.PodName - } - } - return "" +func (x *UpdateTaskRequest) Reset() { + *x = UpdateTaskRequest{} + mi := &file_backend_api_v2beta1_run_proto_msgTypes[19] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } -type isPipelineTaskDetail_ChildTask_ChildTask interface { - isPipelineTaskDetail_ChildTask_ChildTask() +func (x *UpdateTaskRequest) String() string { + return protoimpl.X.MessageStringOf(x) } -type PipelineTaskDetail_ChildTask_TaskId struct { - // System-generated ID of a task. 
- TaskId string `protobuf:"bytes,1,opt,name=task_id,json=taskId,proto3,oneof"` -} +func (*UpdateTaskRequest) ProtoMessage() {} -type PipelineTaskDetail_ChildTask_PodName struct { - // Name of the corresponding pod assigned by the orchestration engine. - // Also known as node_id. - PodName string `protobuf:"bytes,2,opt,name=pod_name,json=podName,proto3,oneof"` -} +func (x *UpdateTaskRequest) ProtoReflect() protoreflect.Message { + mi := &file_backend_api_v2beta1_run_proto_msgTypes[19] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use UpdateTaskRequest.ProtoReflect.Descriptor instead. +func (*UpdateTaskRequest) Descriptor() ([]byte, []int) { + return file_backend_api_v2beta1_run_proto_rawDescGZIP(), []int{19} +} + +func (x *UpdateTaskRequest) GetTaskId() string { + if x != nil { + return x.TaskId + } + return "" +} + +func (x *UpdateTaskRequest) GetTask() *PipelineTaskDetail { + if x != nil { + return x.Task + } + return nil +} + +type UpdateTasksBulkRequest struct { + state protoimpl.MessageState `protogen:"open.v1"` + // Required. Map of task ID to task detail for bulk update. + // Key: task_id, Value: PipelineTaskDetail to update + Tasks map[string]*PipelineTaskDetail `protobuf:"bytes,1,rep,name=tasks,proto3" json:"tasks,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *UpdateTasksBulkRequest) Reset() { + *x = UpdateTasksBulkRequest{} + mi := &file_backend_api_v2beta1_run_proto_msgTypes[20] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *UpdateTasksBulkRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*UpdateTasksBulkRequest) ProtoMessage() {} + +func (x *UpdateTasksBulkRequest) ProtoReflect() protoreflect.Message { + mi := &file_backend_api_v2beta1_run_proto_msgTypes[20] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use UpdateTasksBulkRequest.ProtoReflect.Descriptor instead. +func (*UpdateTasksBulkRequest) Descriptor() ([]byte, []int) { + return file_backend_api_v2beta1_run_proto_rawDescGZIP(), []int{20} +} + +func (x *UpdateTasksBulkRequest) GetTasks() map[string]*PipelineTaskDetail { + if x != nil { + return x.Tasks + } + return nil +} + +type UpdateTasksBulkResponse struct { + state protoimpl.MessageState `protogen:"open.v1"` + // Map of task ID to updated task detail. 
+ // Key: task_id, Value: Updated PipelineTaskDetail
+ Tasks map[string]*PipelineTaskDetail `protobuf:"bytes,1,rep,name=tasks,proto3" json:"tasks,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
+}
+
+func (x *UpdateTasksBulkResponse) Reset() {
+ *x = UpdateTasksBulkResponse{}
+ mi := &file_backend_api_v2beta1_run_proto_msgTypes[21]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *UpdateTasksBulkResponse) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*UpdateTasksBulkResponse) ProtoMessage() {}
+
+func (x *UpdateTasksBulkResponse) ProtoReflect() protoreflect.Message {
+ mi := &file_backend_api_v2beta1_run_proto_msgTypes[21]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use UpdateTasksBulkResponse.ProtoReflect.Descriptor instead.
+func (*UpdateTasksBulkResponse) Descriptor() ([]byte, []int) {
+ return file_backend_api_v2beta1_run_proto_rawDescGZIP(), []int{21}
+}
+
+func (x *UpdateTasksBulkResponse) GetTasks() map[string]*PipelineTaskDetail {
+ if x != nil {
+ return x.Tasks
+ }
+ return nil
+}
+
+type GetTaskRequest struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
+ TaskId string `protobuf:"bytes,1,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
+}
+
+func (x *GetTaskRequest) Reset() {
+ *x = GetTaskRequest{}
+ mi := &file_backend_api_v2beta1_run_proto_msgTypes[22]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *GetTaskRequest) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*GetTaskRequest) ProtoMessage() {}
+
+func (x *GetTaskRequest) ProtoReflect() protoreflect.Message {
+ mi := &file_backend_api_v2beta1_run_proto_msgTypes[22]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use GetTaskRequest.ProtoReflect.Descriptor instead.
+func (*GetTaskRequest) Descriptor() ([]byte, []int) {
+ return file_backend_api_v2beta1_run_proto_rawDescGZIP(), []int{22}
+}
+
+func (x *GetTaskRequest) GetTaskId() string {
+ if x != nil {
+ return x.TaskId
+ }
+ return ""
+}
+
+type ListTasksRequest struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
+ // Required. Must specify either parent_id, run_id, or namespace to filter tasks.
+ // + // Types that are valid to be assigned to ParentFilter: + // + // *ListTasksRequest_ParentId + // *ListTasksRequest_RunId + // *ListTasksRequest_Namespace + ParentFilter isListTasksRequest_ParentFilter `protobuf_oneof:"parent_filter"` + PageSize int32 `protobuf:"varint,4,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + PageToken string `protobuf:"bytes,5,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + Filter string `protobuf:"bytes,6,opt,name=filter,proto3" json:"filter,omitempty"` + OrderBy string `protobuf:"bytes,7,opt,name=order_by,json=orderBy,proto3" json:"order_by,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ListTasksRequest) Reset() { + *x = ListTasksRequest{} + mi := &file_backend_api_v2beta1_run_proto_msgTypes[23] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ListTasksRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListTasksRequest) ProtoMessage() {} + +func (x *ListTasksRequest) ProtoReflect() protoreflect.Message { + mi := &file_backend_api_v2beta1_run_proto_msgTypes[23] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListTasksRequest.ProtoReflect.Descriptor instead. +func (*ListTasksRequest) Descriptor() ([]byte, []int) { + return file_backend_api_v2beta1_run_proto_rawDescGZIP(), []int{23} +} + +func (x *ListTasksRequest) GetParentFilter() isListTasksRequest_ParentFilter { + if x != nil { + return x.ParentFilter + } + return nil +} + +func (x *ListTasksRequest) GetParentId() string { + if x != nil { + if x, ok := x.ParentFilter.(*ListTasksRequest_ParentId); ok { + return x.ParentId + } + } + return "" +} + +func (x *ListTasksRequest) GetRunId() string { + if x != nil { + if x, ok := x.ParentFilter.(*ListTasksRequest_RunId); ok { + return x.RunId + } + } + return "" +} + +func (x *ListTasksRequest) GetNamespace() string { + if x != nil { + if x, ok := x.ParentFilter.(*ListTasksRequest_Namespace); ok { + return x.Namespace + } + } + return "" +} + +func (x *ListTasksRequest) GetPageSize() int32 { + if x != nil { + return x.PageSize + } + return 0 +} + +func (x *ListTasksRequest) GetPageToken() string { + if x != nil { + return x.PageToken + } + return "" +} + +func (x *ListTasksRequest) GetFilter() string { + if x != nil { + return x.Filter + } + return "" +} + +func (x *ListTasksRequest) GetOrderBy() string { + if x != nil { + return x.OrderBy + } + return "" +} + +type isListTasksRequest_ParentFilter interface { + isListTasksRequest_ParentFilter() +} + +type ListTasksRequest_ParentId struct { + // List all tasks with this parent task. + ParentId string `protobuf:"bytes,1,opt,name=parent_id,json=parentId,proto3,oneof"` +} + +type ListTasksRequest_RunId struct { + // List all tasks for this run. + RunId string `protobuf:"bytes,2,opt,name=run_id,json=runId,proto3,oneof"` +} + +type ListTasksRequest_Namespace struct { + // List all tasks in this namespace. + // The primary use case for this filter is to detect cache hits. 
+ Namespace string `protobuf:"bytes,3,opt,name=namespace,proto3,oneof"` +} + +func (*ListTasksRequest_ParentId) isListTasksRequest_ParentFilter() {} + +func (*ListTasksRequest_RunId) isListTasksRequest_ParentFilter() {} + +func (*ListTasksRequest_Namespace) isListTasksRequest_ParentFilter() {} + +type ListTasksResponse struct { + state protoimpl.MessageState `protogen:"open.v1"` + Tasks []*PipelineTaskDetail `protobuf:"bytes,1,rep,name=tasks,proto3" json:"tasks,omitempty"` + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + TotalSize int32 `protobuf:"varint,3,opt,name=total_size,json=totalSize,proto3" json:"total_size,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ListTasksResponse) Reset() { + *x = ListTasksResponse{} + mi := &file_backend_api_v2beta1_run_proto_msgTypes[24] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ListTasksResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListTasksResponse) ProtoMessage() {} + +func (x *ListTasksResponse) ProtoReflect() protoreflect.Message { + mi := &file_backend_api_v2beta1_run_proto_msgTypes[24] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListTasksResponse.ProtoReflect.Descriptor instead. +func (*ListTasksResponse) Descriptor() ([]byte, []int) { + return file_backend_api_v2beta1_run_proto_rawDescGZIP(), []int{24} +} + +func (x *ListTasksResponse) GetTasks() []*PipelineTaskDetail { + if x != nil { + return x.Tasks + } + return nil +} + +func (x *ListTasksResponse) GetNextPageToken() string { + if x != nil { + return x.NextPageToken + } + return "" +} + +func (x *ListTasksResponse) GetTotalSize() int32 { + if x != nil { + return x.TotalSize + } + return 0 +} + +type PipelineTaskDetail_TaskPod struct { + state protoimpl.MessageState `protogen:"open.v1"` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Uid string `protobuf:"bytes,2,opt,name=uid,proto3" json:"uid,omitempty"` + Type PipelineTaskDetail_TaskPodType `protobuf:"varint,3,opt,name=type,proto3,enum=kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail_TaskPodType" json:"type,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *PipelineTaskDetail_TaskPod) Reset() { + *x = PipelineTaskDetail_TaskPod{} + mi := &file_backend_api_v2beta1_run_proto_msgTypes[25] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *PipelineTaskDetail_TaskPod) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PipelineTaskDetail_TaskPod) ProtoMessage() {} + +func (x *PipelineTaskDetail_TaskPod) ProtoReflect() protoreflect.Message { + mi := &file_backend_api_v2beta1_run_proto_msgTypes[25] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PipelineTaskDetail_TaskPod.ProtoReflect.Descriptor instead. 
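[Review note] parent_filter is the first oneof on the new task-list API, so a short sketch of how callers select an arm may save reviewers a trip to the protobuf docs. Editorial annotation only, not part of the generated file; the run ID is invented, and the order_by string assumes the server's usual sort syntax:

// Illustrative only. Exactly one wrapper type selects the parent_filter arm.
req := &ListTasksRequest{
	ParentFilter: &ListTasksRequest_RunId{RunId: "run-123"}, // hypothetical ID
	PageSize:     50,
	OrderBy:      "create_time desc", // assumed order_by syntax
}
// Arm-specific getters are nil-safe and return "" for the unset arms:
_ = req.GetRunId()     // "run-123"
_ = req.GetNamespace() // ""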
+func (*PipelineTaskDetail_TaskPod) Descriptor() ([]byte, []int) {
+ return file_backend_api_v2beta1_run_proto_rawDescGZIP(), []int{4, 0}
+}
+
+func (x *PipelineTaskDetail_TaskPod) GetName() string {
+ if x != nil {
+ return x.Name
+ }
+ return ""
+}
+
+func (x *PipelineTaskDetail_TaskPod) GetUid() string {
+ if x != nil {
+ return x.Uid
+ }
+ return ""
+}
+
+func (x *PipelineTaskDetail_TaskPod) GetType() PipelineTaskDetail_TaskPodType {
+ if x != nil {
+ return x.Type
+ }
+ return PipelineTaskDetail_UNSPECIFIED
+}
+
+type PipelineTaskDetail_StatusMetadata struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
+ // The KFP backend will populate this field with error messages
+ // if any are available on a failed task.
+ Message string `protobuf:"bytes,1,opt,name=message,proto3" json:"message,omitempty"`
+ // Custom status metadata. This can be used to provide
+ // additional status info for a given task during runtime.
+ // It is currently not utilized by the KFP backend.
+ CustomProperties map[string]*structpb.Value `protobuf:"bytes,2,rep,name=custom_properties,json=customProperties,proto3" json:"custom_properties,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
+}
+
+func (x *PipelineTaskDetail_StatusMetadata) Reset() {
+ *x = PipelineTaskDetail_StatusMetadata{}
+ mi := &file_backend_api_v2beta1_run_proto_msgTypes[26]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *PipelineTaskDetail_StatusMetadata) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*PipelineTaskDetail_StatusMetadata) ProtoMessage() {}
+
+func (x *PipelineTaskDetail_StatusMetadata) ProtoReflect() protoreflect.Message {
+ mi := &file_backend_api_v2beta1_run_proto_msgTypes[26]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use PipelineTaskDetail_StatusMetadata.ProtoReflect.Descriptor instead.
+func (*PipelineTaskDetail_StatusMetadata) Descriptor() ([]byte, []int) {
+ return file_backend_api_v2beta1_run_proto_rawDescGZIP(), []int{4, 1}
+}
+
+func (x *PipelineTaskDetail_StatusMetadata) GetMessage() string {
+ if x != nil {
+ return x.Message
+ }
+ return ""
+}
+
+func (x *PipelineTaskDetail_StatusMetadata) GetCustomProperties() map[string]*structpb.Value {
+ if x != nil {
+ return x.CustomProperties
+ }
+ return nil
+}
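[Review note] StatusMetadata carries its custom properties as google.protobuf.Value entries, so callers go through the structpb helpers (already imported by this file). A construction sketch, editorial only; the property keys and values are invented, and per the field comment the backend does not read them today:

// Illustrative only.
meta := &PipelineTaskDetail_StatusMetadata{
	Message: "pod evicted; will retry", // set by the backend on failures
	CustomProperties: map[string]*structpb.Value{
		"retry_count": structpb.NewNumberValue(1),          // structpb wraps Go scalars
		"node_pool":   structpb.NewStringValue("gpu-a100"), // invented key/value
	},
}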
+// Timestamped representation of a Task state with an optional error.
+type PipelineTaskDetail_TaskStatus struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
+ UpdateTime *timestamppb.Timestamp `protobuf:"bytes,1,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"`
+ State PipelineTaskDetail_TaskState `protobuf:"varint,2,opt,name=state,proto3,enum=kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail_TaskState" json:"state,omitempty"`
+ Error *status.Status `protobuf:"bytes,3,opt,name=error,proto3" json:"error,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
+}
+
+func (x *PipelineTaskDetail_TaskStatus) Reset() {
+ *x = PipelineTaskDetail_TaskStatus{}
+ mi := &file_backend_api_v2beta1_run_proto_msgTypes[27]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *PipelineTaskDetail_TaskStatus) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*PipelineTaskDetail_TaskStatus) ProtoMessage() {}
+
+func (x *PipelineTaskDetail_TaskStatus) ProtoReflect() protoreflect.Message {
+ mi := &file_backend_api_v2beta1_run_proto_msgTypes[27]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use PipelineTaskDetail_TaskStatus.ProtoReflect.Descriptor instead.
+func (*PipelineTaskDetail_TaskStatus) Descriptor() ([]byte, []int) {
+ return file_backend_api_v2beta1_run_proto_rawDescGZIP(), []int{4, 2}
+}
+
+func (x *PipelineTaskDetail_TaskStatus) GetUpdateTime() *timestamppb.Timestamp {
+ if x != nil {
+ return x.UpdateTime
+ }
+ return nil
+}
+
+func (x *PipelineTaskDetail_TaskStatus) GetState() PipelineTaskDetail_TaskState {
+ if x != nil {
+ return x.State
+ }
+ return PipelineTaskDetail_RUNTIME_STATE_UNSPECIFIED
+}
+
+func (x *PipelineTaskDetail_TaskStatus) GetError() *status.Status {
+ if x != nil {
+ return x.Error
+ }
+ return nil
+}
+
+type PipelineTaskDetail_TypeAttributes struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
+ // Optional. Applies to tasks of type RUNTIME that are loop iterations.
+ IterationIndex *int64 `protobuf:"varint,1,opt,name=iteration_index,json=iterationIndex,proto3,oneof" json:"iteration_index,omitempty"`
+ // Optional. Applies to tasks of type LOOP.
+ IterationCount *int64 `protobuf:"varint,2,opt,name=iteration_count,json=iterationCount,proto3,oneof" json:"iteration_count,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
+}
+
+func (x *PipelineTaskDetail_TypeAttributes) Reset() {
+ *x = PipelineTaskDetail_TypeAttributes{}
+ mi := &file_backend_api_v2beta1_run_proto_msgTypes[28]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *PipelineTaskDetail_TypeAttributes) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*PipelineTaskDetail_TypeAttributes) ProtoMessage() {}
+
+func (x *PipelineTaskDetail_TypeAttributes) ProtoReflect() protoreflect.Message {
+ mi := &file_backend_api_v2beta1_run_proto_msgTypes[28]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
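[Review note] iteration_index and iteration_count are proto3 optional fields, so they generate *int64 with presence-aware getters. A small sketch, editorial only; the count is invented:

// Illustrative only. proto.Int64 (from google.golang.org/protobuf/proto)
// returns the address of its argument, which is how optional scalars are set.
attrs := &PipelineTaskDetail_TypeAttributes{
	IterationCount: proto.Int64(5), // e.g. a LOOP task with five iterations
}
_ = attrs.GetIterationCount() // 5
_ = attrs.GetIterationIndex() // 0 (unset); check attrs.IterationIndex != nil for presence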
+// Deprecated: Use PipelineTaskDetail_TypeAttributes.ProtoReflect.Descriptor instead.
+func (*PipelineTaskDetail_TypeAttributes) Descriptor() ([]byte, []int) {
+ return file_backend_api_v2beta1_run_proto_rawDescGZIP(), []int{4, 3}
+}
+
+func (x *PipelineTaskDetail_TypeAttributes) GetIterationIndex() int64 {
+ if x != nil && x.IterationIndex != nil {
+ return *x.IterationIndex
+ }
+ return 0
+}
+
+func (x *PipelineTaskDetail_TypeAttributes) GetIterationCount() int64 {
+ if x != nil && x.IterationCount != nil {
+ return *x.IterationCount
+ }
+ return 0
+}
+
+// A dependent task that requires this one to succeed.
+// Represented by its task_id and name.
+type PipelineTaskDetail_ChildTask struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
+ // System-generated ID of a task.
+ TaskId string `protobuf:"bytes,1,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"`
+ Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
+}
+
+func (x *PipelineTaskDetail_ChildTask) Reset() {
+ *x = PipelineTaskDetail_ChildTask{}
+ mi := &file_backend_api_v2beta1_run_proto_msgTypes[29]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *PipelineTaskDetail_ChildTask) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*PipelineTaskDetail_ChildTask) ProtoMessage() {}
+
+func (x *PipelineTaskDetail_ChildTask) ProtoReflect() protoreflect.Message {
+ mi := &file_backend_api_v2beta1_run_proto_msgTypes[29]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use PipelineTaskDetail_ChildTask.ProtoReflect.Descriptor instead.
+func (*PipelineTaskDetail_ChildTask) Descriptor() ([]byte, []int) {
+ return file_backend_api_v2beta1_run_proto_rawDescGZIP(), []int{4, 4}
+}
+
+func (x *PipelineTaskDetail_ChildTask) GetTaskId() string {
+ if x != nil {
+ return x.TaskId
+ }
+ return ""
+}
+
+func (x *PipelineTaskDetail_ChildTask) GetName() string {
+ if x != nil {
+ return x.Name
+ }
+ return ""
+}
+
+type PipelineTaskDetail_InputOutputs struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
+ // For loops, parameters are filled with the resolved
+ // parameterIterator.items.
+ Parameters []*PipelineTaskDetail_InputOutputs_IOParameter `protobuf:"bytes,1,rep,name=parameters,proto3" json:"parameters,omitempty"`
+ // Output only. To create artifacts for a task, use
+ // ArtifactTasks to link artifacts to tasks.
+ Artifacts []*PipelineTaskDetail_InputOutputs_IOArtifact `protobuf:"bytes,2,rep,name=artifacts,proto3" json:"artifacts,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *PipelineTaskDetail_InputOutputs) Reset() { + *x = PipelineTaskDetail_InputOutputs{} + mi := &file_backend_api_v2beta1_run_proto_msgTypes[30] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *PipelineTaskDetail_InputOutputs) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PipelineTaskDetail_InputOutputs) ProtoMessage() {} -func (*PipelineTaskDetail_ChildTask_TaskId) isPipelineTaskDetail_ChildTask_ChildTask() {} +func (x *PipelineTaskDetail_InputOutputs) ProtoReflect() protoreflect.Message { + mi := &file_backend_api_v2beta1_run_proto_msgTypes[30] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PipelineTaskDetail_InputOutputs.ProtoReflect.Descriptor instead. +func (*PipelineTaskDetail_InputOutputs) Descriptor() ([]byte, []int) { + return file_backend_api_v2beta1_run_proto_rawDescGZIP(), []int{4, 5} +} + +func (x *PipelineTaskDetail_InputOutputs) GetParameters() []*PipelineTaskDetail_InputOutputs_IOParameter { + if x != nil { + return x.Parameters + } + return nil +} + +func (x *PipelineTaskDetail_InputOutputs) GetArtifacts() []*PipelineTaskDetail_InputOutputs_IOArtifact { + if x != nil { + return x.Artifacts + } + return nil +} + +type PipelineTaskDetail_InputOutputs_IOParameter struct { + state protoimpl.MessageState `protogen:"open.v1"` + Value *structpb.Value `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"` + Type IOType `protobuf:"varint,2,opt,name=type,proto3,enum=kubeflow.pipelines.backend.api.v2beta1.IOType" json:"type,omitempty"` + ParameterKey string `protobuf:"bytes,3,opt,name=parameter_key,json=parameterKey,proto3" json:"parameter_key,omitempty"` + // This field is optional because in the case of + // Input RuntimeValues, ComponentDefaultInputs, + // and Raw Iterator Input there are no producers. + Producer *IOProducer `protobuf:"bytes,4,opt,name=producer,proto3,oneof" json:"producer,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *PipelineTaskDetail_InputOutputs_IOParameter) Reset() { + *x = PipelineTaskDetail_InputOutputs_IOParameter{} + mi := &file_backend_api_v2beta1_run_proto_msgTypes[32] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *PipelineTaskDetail_InputOutputs_IOParameter) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PipelineTaskDetail_InputOutputs_IOParameter) ProtoMessage() {} + +func (x *PipelineTaskDetail_InputOutputs_IOParameter) ProtoReflect() protoreflect.Message { + mi := &file_backend_api_v2beta1_run_proto_msgTypes[32] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PipelineTaskDetail_InputOutputs_IOParameter.ProtoReflect.Descriptor instead. 
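[Review note] IOParameter carries its value as a google.protobuf.Value, and producer is optional precisely because runtime values, component default inputs, and raw iterator inputs have no producing task. A minimal construction sketch, editorial only; the key and value are invented:

// Illustrative only.
param := &PipelineTaskDetail_InputOutputs_IOParameter{
	ParameterKey: "learning_rate",                // invented key
	Value:        structpb.NewNumberValue(0.01),  // invented value
	// Producer stays nil here: a runtime value has no producing task.
}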
+func (*PipelineTaskDetail_InputOutputs_IOParameter) Descriptor() ([]byte, []int) { + return file_backend_api_v2beta1_run_proto_rawDescGZIP(), []int{4, 5, 0} +} + +func (x *PipelineTaskDetail_InputOutputs_IOParameter) GetValue() *structpb.Value { + if x != nil { + return x.Value + } + return nil +} + +func (x *PipelineTaskDetail_InputOutputs_IOParameter) GetType() IOType { + if x != nil { + return x.Type + } + return IOType_UNSPECIFIED +} + +func (x *PipelineTaskDetail_InputOutputs_IOParameter) GetParameterKey() string { + if x != nil { + return x.ParameterKey + } + return "" +} + +func (x *PipelineTaskDetail_InputOutputs_IOParameter) GetProducer() *IOProducer { + if x != nil { + return x.Producer + } + return nil +} -func (*PipelineTaskDetail_ChildTask_PodName) isPipelineTaskDetail_ChildTask_ChildTask() {} +// Align structure with Executor Input +type PipelineTaskDetail_InputOutputs_IOArtifact struct { + state protoimpl.MessageState `protogen:"open.v1"` + Artifacts []*Artifact `protobuf:"bytes,1,rep,name=artifacts,proto3" json:"artifacts,omitempty"` + Type IOType `protobuf:"varint,2,opt,name=type,proto3,enum=kubeflow.pipelines.backend.api.v2beta1.IOType" json:"type,omitempty"` + ArtifactKey string `protobuf:"bytes,3,opt,name=artifact_key,json=artifactKey,proto3" json:"artifact_key,omitempty"` + Producer *IOProducer `protobuf:"bytes,4,opt,name=producer,proto3" json:"producer,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *PipelineTaskDetail_InputOutputs_IOArtifact) Reset() { + *x = PipelineTaskDetail_InputOutputs_IOArtifact{} + mi := &file_backend_api_v2beta1_run_proto_msgTypes[33] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *PipelineTaskDetail_InputOutputs_IOArtifact) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PipelineTaskDetail_InputOutputs_IOArtifact) ProtoMessage() {} + +func (x *PipelineTaskDetail_InputOutputs_IOArtifact) ProtoReflect() protoreflect.Message { + mi := &file_backend_api_v2beta1_run_proto_msgTypes[33] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PipelineTaskDetail_InputOutputs_IOArtifact.ProtoReflect.Descriptor instead. 
+func (*PipelineTaskDetail_InputOutputs_IOArtifact) Descriptor() ([]byte, []int) { + return file_backend_api_v2beta1_run_proto_rawDescGZIP(), []int{4, 5, 1} +} + +func (x *PipelineTaskDetail_InputOutputs_IOArtifact) GetArtifacts() []*Artifact { + if x != nil { + return x.Artifacts + } + return nil +} + +func (x *PipelineTaskDetail_InputOutputs_IOArtifact) GetType() IOType { + if x != nil { + return x.Type + } + return IOType_UNSPECIFIED +} + +func (x *PipelineTaskDetail_InputOutputs_IOArtifact) GetArtifactKey() string { + if x != nil { + return x.ArtifactKey + } + return "" +} + +func (x *PipelineTaskDetail_InputOutputs_IOArtifact) GetProducer() *IOProducer { + if x != nil { + return x.Producer + } + return nil +} var File_backend_api_v2beta1_run_proto protoreflect.FileDescriptor const file_backend_api_v2beta1_run_proto_rawDesc = "" + "\n" + - "\x1dbackend/api/v2beta1/run.proto\x12&kubeflow.pipelines.backend.api.v2beta1\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x17google/rpc/status.proto\x1a.protoc-gen-openapiv2/options/annotations.proto\x1a(backend/api/v2beta1/runtime_config.proto\"\xcc\t\n" + + "\x1dbackend/api/v2beta1/run.proto\x12&kubeflow.pipelines.backend.api.v2beta1\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x17google/rpc/status.proto\x1a.protoc-gen-openapiv2/options/annotations.proto\x1a(backend/api/v2beta1/runtime_config.proto\x1a\"backend/api/v2beta1/artifact.proto\"\xb2\v\n" + "\x03Run\x12#\n" + "\rexperiment_id\x18\x01 \x01(\tR\fexperimentId\x12\x15\n" + "\x06run_id\x18\x02 \x01(\tR\x05runId\x12!\n" + @@ -1709,11 +2902,15 @@ const file_backend_api_v2beta1_run_proto_rawDesc = "" + "\vfinished_at\x18\f \x01(\v2\x1a.google.protobuf.TimestampR\n" + "finishedAt\x12J\n" + "\x05state\x18\r \x01(\x0e24.kubeflow.pipelines.backend.api.v2beta1.RuntimeStateR\x05state\x12(\n" + - "\x05error\x18\x0e \x01(\v2\x12.google.rpc.StatusR\x05error\x12S\n" + - "\vrun_details\x18\x0f \x01(\v22.kubeflow.pipelines.backend.api.v2beta1.RunDetailsR\n" + + "\x05error\x18\x0e \x01(\v2\x12.google.rpc.StatusR\x05error\x12W\n" + + "\vrun_details\x18\x0f \x01(\v22.kubeflow.pipelines.backend.api.v2beta1.RunDetailsB\x02\x18\x01R\n" + "runDetails\x12(\n" + "\x10recurring_run_id\x18\x10 \x01(\tR\x0erecurringRunId\x12Z\n" + - "\rstate_history\x18\x11 \x03(\v25.kubeflow.pipelines.backend.api.v2beta1.RuntimeStatusR\fstateHistory\"J\n" + + "\rstate_history\x18\x11 \x03(\v25.kubeflow.pipelines.backend.api.v2beta1.RuntimeStatusR\fstateHistory\x12o\n" + + "\x12pipeline_reference\x18\x13 \x01(\v2@.kubeflow.pipelines.backend.api.v2beta1.PipelineVersionReferenceR\x11pipelineReference\x12\x1d\n" + + "\n" + + "task_count\x18\x14 \x01(\x05R\ttaskCount\x12P\n" + + "\x05tasks\x18\x15 \x03(\v2:.kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetailR\x05tasks\"J\n" + "\fStorageState\x12\x1d\n" + "\x19STORAGE_STATE_UNSPECIFIED\x10\x00\x12\r\n" + "\tAVAILABLE\x10\x01\x12\f\n" + @@ -1732,40 +2929,98 @@ const file_backend_api_v2beta1_run_proto_rawDesc = "" + "RunDetails\x12.\n" + "\x13pipeline_context_id\x18\x01 \x01(\x03R\x11pipelineContextId\x125\n" + "\x17pipeline_run_context_id\x18\x02 \x01(\x03R\x14pipelineRunContextId\x12]\n" + - "\ftask_details\x18\x03 \x03(\v2:.kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetailR\vtaskDetails\"\x99\n" + - "\n" + - "\x12PipelineTaskDetail\x12\x15\n" + - "\x06run_id\x18\x01 
\x01(\tR\x05runId\x12\x17\n" + - "\atask_id\x18\x02 \x01(\tR\x06taskId\x12!\n" + - "\fdisplay_name\x18\x03 \x01(\tR\vdisplayName\x12;\n" + - "\vcreate_time\x18\x04 \x01(\v2\x1a.google.protobuf.TimestampR\n" + + "\ftask_details\x18\x03 \x03(\v2:.kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetailR\vtaskDetails\"\xed\x19\n" + + "\x12PipelineTaskDetail\x12\x12\n" + + "\x04name\x18\x01 \x01(\tR\x04name\x12!\n" + + "\fdisplay_name\x18\x02 \x01(\tR\vdisplayName\x12\x17\n" + + "\atask_id\x18\x03 \x01(\tR\x06taskId\x12\x15\n" + + "\x06run_id\x18\x04 \x01(\tR\x05runId\x12V\n" + + "\x04pods\x18\x05 \x03(\v2B.kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.TaskPodR\x04pods\x12+\n" + + "\x11cache_fingerprint\x18\x06 \x01(\tR\x10cacheFingerprint\x12;\n" + + "\vcreate_time\x18\a \x01(\v2\x1a.google.protobuf.TimestampR\n" + "createTime\x129\n" + "\n" + - "start_time\x18\x05 \x01(\v2\x1a.google.protobuf.TimestampR\tstartTime\x125\n" + - "\bend_time\x18\x06 \x01(\v2\x1a.google.protobuf.TimestampR\aendTime\x12k\n" + - "\x0fexecutor_detail\x18\a \x01(\v2B.kubeflow.pipelines.backend.api.v2beta1.PipelineTaskExecutorDetailR\x0eexecutorDetail\x12J\n" + - "\x05state\x18\b \x01(\x0e24.kubeflow.pipelines.backend.api.v2beta1.RuntimeStateR\x05state\x12!\n" + - "\fexecution_id\x18\t \x01(\x03R\vexecutionId\x12(\n" + - "\x05error\x18\n" + - " \x01(\v2\x12.google.rpc.StatusR\x05error\x12^\n" + - "\x06inputs\x18\v \x03(\v2F.kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.InputsEntryR\x06inputs\x12a\n" + - "\aoutputs\x18\f \x03(\v2G.kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.OutputsEntryR\aoutputs\x12$\n" + - "\x0eparent_task_id\x18\r \x01(\tR\fparentTaskId\x12Z\n" + - "\rstate_history\x18\x0e \x03(\v25.kubeflow.pipelines.backend.api.v2beta1.RuntimeStatusR\fstateHistory\x12\x19\n" + - "\bpod_name\x18\x0f \x01(\tR\apodName\x12e\n" + - "\vchild_tasks\x18\x10 \x03(\v2D.kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.ChildTaskR\n" + - "childTasks\x1ao\n" + - "\vInputsEntry\x12\x10\n" + - "\x03key\x18\x01 \x01(\tR\x03key\x12J\n" + - "\x05value\x18\x02 \x01(\v24.kubeflow.pipelines.backend.api.v2beta1.ArtifactListR\x05value:\x028\x01\x1ap\n" + - "\fOutputsEntry\x12\x10\n" + - "\x03key\x18\x01 \x01(\tR\x03key\x12J\n" + - "\x05value\x18\x02 \x01(\v24.kubeflow.pipelines.backend.api.v2beta1.ArtifactListR\x05value:\x028\x01\x1aQ\n" + - "\tChildTask\x12\x19\n" + - "\atask_id\x18\x01 \x01(\tH\x00R\x06taskId\x12\x1b\n" + - "\bpod_name\x18\x02 \x01(\tH\x00R\apodNameB\f\n" + + "start_time\x18\b \x01(\v2\x1a.google.protobuf.TimestampR\tstartTime\x125\n" + + "\bend_time\x18\t \x01(\v2\x1a.google.protobuf.TimestampR\aendTime\x12Z\n" + + "\x05state\x18\n" + + " \x01(\x0e2D.kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.TaskStateR\x05state\x12r\n" + + "\x0fstatus_metadata\x18\v \x01(\v2I.kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.StatusMetadataR\x0estatusMetadata\x12j\n" + + "\rstate_history\x18\f \x03(\v2E.kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.TaskStatusR\fstateHistory\x12W\n" + + "\x04type\x18\r \x01(\x0e2C.kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.TaskTypeR\x04type\x12r\n" + + "\x0ftype_attributes\x18\x0e \x01(\v2I.kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.TypeAttributesR\x0etypeAttributes\x12(\n" + + "\x05error\x18\x0f \x01(\v2\x12.google.rpc.StatusR\x05error\x12)\n" + + "\x0eparent_task_id\x18\x10 \x01(\tH\x00R\fparentTaskId\x88\x01\x01\x12e\n" + + "\vchild_tasks\x18\x11 
\x03(\v2D.kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.ChildTaskR\n" + + "childTasks\x12_\n" + + "\x06inputs\x18\x12 \x01(\v2G.kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.InputOutputsR\x06inputs\x12a\n" + + "\aoutputs\x18\x13 \x01(\v2G.kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.InputOutputsR\aoutputs\x12\x1d\n" + + "\n" + + "scope_path\x18\x14 \x03(\tR\tscopePath\x1a\x8b\x01\n" + + "\aTaskPod\x12\x12\n" + + "\x04name\x18\x01 \x01(\tR\x04name\x12\x10\n" + + "\x03uid\x18\x02 \x01(\tR\x03uid\x12Z\n" + + "\x04type\x18\x03 \x01(\x0e2F.kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.TaskPodTypeR\x04type\x1a\x96\x02\n" + + "\x0eStatusMetadata\x12\x18\n" + + "\amessage\x18\x01 \x01(\tR\amessage\x12\x8c\x01\n" + + "\x11custom_properties\x18\x02 \x03(\v2_.kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.StatusMetadata.CustomPropertiesEntryR\x10customProperties\x1a[\n" + + "\x15CustomPropertiesEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12,\n" + + "\x05value\x18\x02 \x01(\v2\x16.google.protobuf.ValueR\x05value:\x028\x01\x1a\xcf\x01\n" + + "\n" + + "TaskStatus\x12;\n" + + "\vupdate_time\x18\x01 \x01(\v2\x1a.google.protobuf.TimestampR\n" + + "updateTime\x12Z\n" + + "\x05state\x18\x02 \x01(\x0e2D.kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.TaskStateR\x05state\x12(\n" + + "\x05error\x18\x03 \x01(\v2\x12.google.rpc.StatusR\x05error\x1a\x94\x01\n" + + "\x0eTypeAttributes\x12,\n" + + "\x0fiteration_index\x18\x01 \x01(\x03H\x00R\x0eiterationIndex\x88\x01\x01\x12,\n" + + "\x0fiteration_count\x18\x02 \x01(\x03H\x01R\x0eiterationCount\x88\x01\x01B\x12\n" + + "\x10_iteration_indexB\x12\n" + + "\x10_iteration_count\x1a8\n" + + "\tChildTask\x12\x17\n" + + "\atask_id\x18\x01 \x01(\tR\x06taskId\x12\x12\n" + + "\x04name\x18\x02 \x01(\tR\x04name\x1a\x94\x06\n" + + "\fInputOutputs\x12s\n" + + "\n" + + "parameters\x18\x01 \x03(\v2S.kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.InputOutputs.IOParameterR\n" + + "parameters\x12p\n" + + "\tartifacts\x18\x02 \x03(\v2R.kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.InputOutputs.IOArtifactR\tartifacts\x1a\x86\x02\n" + + "\vIOParameter\x12,\n" + + "\x05value\x18\x01 \x01(\v2\x16.google.protobuf.ValueR\x05value\x12B\n" + + "\x04type\x18\x02 \x01(\x0e2..kubeflow.pipelines.backend.api.v2beta1.IOTypeR\x04type\x12#\n" + + "\rparameter_key\x18\x03 \x01(\tR\fparameterKey\x12S\n" + + "\bproducer\x18\x04 \x01(\v22.kubeflow.pipelines.backend.api.v2beta1.IOProducerH\x00R\bproducer\x88\x01\x01B\v\n" + + "\t_producer\x1a\x93\x02\n" + "\n" + - "child_task\"\xd6\x01\n" + + "IOArtifact\x12N\n" + + "\tartifacts\x18\x01 \x03(\v20.kubeflow.pipelines.backend.api.v2beta1.ArtifactR\tartifacts\x12B\n" + + "\x04type\x18\x02 \x01(\x0e2..kubeflow.pipelines.backend.api.v2beta1.IOTypeR\x04type\x12!\n" + + "\fartifact_key\x18\x03 \x01(\tR\vartifactKey\x12N\n" + + "\bproducer\x18\x04 \x01(\v22.kubeflow.pipelines.backend.api.v2beta1.IOProducerR\bproducer\"8\n" + + "\vTaskPodType\x12\x0f\n" + + "\vUNSPECIFIED\x10\x00\x12\n" + + "\n" + + "\x06DRIVER\x10\x01\x12\f\n" + + "\bEXECUTOR\x10\x02\"k\n" + + "\tTaskState\x12\x1d\n" + + "\x19RUNTIME_STATE_UNSPECIFIED\x10\x00\x12\v\n" + + "\aRUNNING\x10\x01\x12\r\n" + + "\tSUCCEEDED\x10\x02\x12\v\n" + + "\aSKIPPED\x10\x03\x12\n" + + "\n" + + "\x06FAILED\x10\x04\x12\n" + + "\n" + + "\x06CACHED\x10\x05\"y\n" + + "\bTaskType\x12\b\n" + + "\x04ROOT\x10\x00\x12\v\n" + + "\aRUNTIME\x10\x01\x12\x14\n" + + "\x10CONDITION_BRANCH\x10\x02\x12\r\n" + + 
"\tCONDITION\x10\x03\x12\b\n" + + "\x04LOOP\x10\x04\x12\x10\n" + + "\fEXIT_HANDLER\x10\x05\x12\f\n" + + "\bIMPORTER\x10\x06\x12\a\n" + + "\x03DAG\x10\aB\x11\n" + + "\x0f_parent_task_id\"\xd6\x01\n" + "\x1aPipelineTaskExecutorDetail\x12\x19\n" + "\bmain_job\x18\x01 \x01(\tR\amainJob\x121\n" + "\x15pre_caching_check_job\x18\x02 \x01(\tR\x12preCachingCheckJob\x12(\n" + @@ -1775,10 +3030,15 @@ const file_backend_api_v2beta1_run_proto_rawDesc = "" + "\fartifact_ids\x18\x01 \x03(\x03R\vartifactIds\"z\n" + "\x10CreateRunRequest\x12'\n" + "\rexperiment_id\x18\x01 \x01(\tB\x02\x18\x01R\fexperimentId\x12=\n" + - "\x03run\x18\x02 \x01(\v2+.kubeflow.pipelines.backend.api.v2beta1.RunR\x03run\"O\n" + + "\x03run\x18\x02 \x01(\v2+.kubeflow.pipelines.backend.api.v2beta1.RunR\x03run\"\xd4\x01\n" + "\rGetRunRequest\x12'\n" + "\rexperiment_id\x18\x01 \x01(\tB\x02\x18\x01R\fexperimentId\x12\x15\n" + - "\x06run_id\x18\x02 \x01(\tR\x05runId\"\xc1\x01\n" + + "\x06run_id\x18\x02 \x01(\tR\x05runId\x12W\n" + + "\x04view\x18\x03 \x01(\x0e2>.kubeflow.pipelines.backend.api.v2beta1.GetRunRequest.ViewModeH\x00R\x04view\x88\x01\x01\"!\n" + + "\bViewMode\x12\v\n" + + "\aDEFAULT\x10\x00\x12\b\n" + + "\x04FULL\x10\x01B\a\n" + + "\x05_view\"\xc8\x02\n" + "\x0fListRunsRequest\x12\x1c\n" + "\tnamespace\x18\x01 \x01(\tR\tnamespace\x12#\n" + "\rexperiment_id\x18\x02 \x01(\tR\fexperimentId\x12\x1d\n" + @@ -1786,7 +3046,12 @@ const file_backend_api_v2beta1_run_proto_rawDesc = "" + "page_token\x18\x03 \x01(\tR\tpageToken\x12\x1b\n" + "\tpage_size\x18\x04 \x01(\x05R\bpageSize\x12\x17\n" + "\asort_by\x18\x05 \x01(\tR\x06sortBy\x12\x16\n" + - "\x06filter\x18\x06 \x01(\tR\x06filter\"U\n" + + "\x06filter\x18\x06 \x01(\tR\x06filter\x12Y\n" + + "\x04view\x18\a \x01(\x0e2@.kubeflow.pipelines.backend.api.v2beta1.ListRunsRequest.ViewModeH\x00R\x04view\x88\x01\x01\"!\n" + + "\bViewMode\x12\v\n" + + "\aDEFAULT\x10\x00\x12\b\n" + + "\x04FULL\x10\x01B\a\n" + + "\x05_view\"U\n" + "\x13TerminateRunRequest\x12'\n" + "\rexperiment_id\x18\x01 \x01(\tB\x02\x18\x01R\fexperimentId\x12\x15\n" + "\x06run_id\x18\x02 \x01(\tR\x05runId\"\x9a\x01\n" + @@ -1813,7 +3078,41 @@ const file_backend_api_v2beta1_run_proto_rawDesc = "" + "\x04data\x18\x01 \x01(\fR\x04data\"Q\n" + "\x0fRetryRunRequest\x12'\n" + "\rexperiment_id\x18\x01 \x01(\tB\x02\x18\x01R\fexperimentId\x12\x15\n" + - "\x06run_id\x18\x02 \x01(\tR\x05runId*\x98\x01\n" + + "\x06run_id\x18\x02 \x01(\tR\x05runId\"c\n" + + "\x11CreateTaskRequest\x12N\n" + + "\x04task\x18\x01 \x01(\v2:.kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetailR\x04task\"|\n" + + "\x11UpdateTaskRequest\x12\x17\n" + + "\atask_id\x18\x01 \x01(\tR\x06taskId\x12N\n" + + "\x04task\x18\x02 \x01(\v2:.kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetailR\x04task\"\xef\x01\n" + + "\x16UpdateTasksBulkRequest\x12_\n" + + "\x05tasks\x18\x01 \x03(\v2I.kubeflow.pipelines.backend.api.v2beta1.UpdateTasksBulkRequest.TasksEntryR\x05tasks\x1at\n" + + "\n" + + "TasksEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12P\n" + + "\x05value\x18\x02 \x01(\v2:.kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetailR\x05value:\x028\x01\"\xf1\x01\n" + + "\x17UpdateTasksBulkResponse\x12`\n" + + "\x05tasks\x18\x01 \x03(\v2J.kubeflow.pipelines.backend.api.v2beta1.UpdateTasksBulkResponse.TasksEntryR\x05tasks\x1at\n" + + "\n" + + "TasksEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12P\n" + + "\x05value\x18\x02 \x01(\v2:.kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetailR\x05value:\x028\x01\")\n" + + "\x0eGetTaskRequest\x12\x17\n" + 
+ "\atask_id\x18\x01 \x01(\tR\x06taskId\"\xea\x01\n" + + "\x10ListTasksRequest\x12\x1d\n" + + "\tparent_id\x18\x01 \x01(\tH\x00R\bparentId\x12\x17\n" + + "\x06run_id\x18\x02 \x01(\tH\x00R\x05runId\x12\x1e\n" + + "\tnamespace\x18\x03 \x01(\tH\x00R\tnamespace\x12\x1b\n" + + "\tpage_size\x18\x04 \x01(\x05R\bpageSize\x12\x1d\n" + + "\n" + + "page_token\x18\x05 \x01(\tR\tpageToken\x12\x16\n" + + "\x06filter\x18\x06 \x01(\tR\x06filter\x12\x19\n" + + "\border_by\x18\a \x01(\tR\aorderByB\x0f\n" + + "\rparent_filter\"\xac\x01\n" + + "\x11ListTasksResponse\x12P\n" + + "\x05tasks\x18\x01 \x03(\v2:.kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetailR\x05tasks\x12&\n" + + "\x0fnext_page_token\x18\x02 \x01(\tR\rnextPageToken\x12\x1d\n" + + "\n" + + "total_size\x18\x03 \x01(\x05R\ttotalSize*\x98\x01\n" + "\fRuntimeState\x12\x1d\n" + "\x19RUNTIME_STATE_UNSPECIFIED\x10\x00\x12\v\n" + "\aPENDING\x10\x01\x12\v\n" + @@ -1825,8 +3124,7 @@ const file_backend_api_v2beta1_run_proto_rawDesc = "" + "\tCANCELING\x10\x06\x12\f\n" + "\bCANCELED\x10\a\x12\n" + "\n" + - "\x06PAUSED\x10\b2\xf9\n" + - "\n" + + "\x06PAUSED\x10\b2\x80\x14\n" + "\n" + "RunService\x12\x93\x01\n" + "\tCreateRun\x128.kubeflow.pipelines.backend.api.v2beta1.CreateRunRequest\x1a+.kubeflow.pipelines.backend.api.v2beta1.Run\"\x1f\x82\xd3\xe4\x93\x02\x19:\x03run\"\x12/apis/v2beta1/runs\x12\x91\x01\n" + @@ -1838,7 +3136,25 @@ const file_backend_api_v2beta1_run_proto_rawDesc = "" + "\tDeleteRun\x128.kubeflow.pipelines.backend.api.v2beta1.DeleteRunRequest\x1a\x16.google.protobuf.Empty\"#\x82\xd3\xe4\x93\x02\x1d*\x1b/apis/v2beta1/runs/{run_id}\x12\xdd\x01\n" + "\fReadArtifact\x12;.kubeflow.pipelines.backend.api.v2beta1.ReadArtifactRequest\x1a<.kubeflow.pipelines.backend.api.v2beta1.ReadArtifactResponse\"R\x82\xd3\xe4\x93\x02L\x12J/apis/v2beta1/runs/{run_id}/nodes/{node_id}/artifacts/{artifact_name}:read\x12\x92\x01\n" + "\fTerminateRun\x12;.kubeflow.pipelines.backend.api.v2beta1.TerminateRunRequest\x1a\x16.google.protobuf.Empty\"-\x82\xd3\xe4\x93\x02'\"%/apis/v2beta1/runs/{run_id}:terminate\x12\x86\x01\n" + - "\bRetryRun\x127.kubeflow.pipelines.backend.api.v2beta1.RetryRunRequest\x1a\x16.google.protobuf.Empty\")\x82\xd3\xe4\x93\x02#\"!/apis/v2beta1/runs/{run_id}:retryB\x98\x01\x92AX*\x02\x01\x02R#\n" + + "\bRetryRun\x127.kubeflow.pipelines.backend.api.v2beta1.RetryRunRequest\x1a\x16.google.protobuf.Empty\")\x82\xd3\xe4\x93\x02#\"!/apis/v2beta1/runs/{run_id}:retry\x12\xd7\x01\n" + + "\n" + + "CreateTask\x129.kubeflow.pipelines.backend.api.v2beta1.CreateTaskRequest\x1a:.kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail\"R\x92A.\n" + + "\n" + + "RunService\x12\x13Creates a new task.*\vcreate_task\x82\xd3\xe4\x93\x02\x1b:\x04task\"\x13/apis/v2beta1/tasks\x12\xe7\x01\n" + + "\n" + + "UpdateTask\x129.kubeflow.pipelines.backend.api.v2beta1.UpdateTaskRequest\x1a:.kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail\"b\x92A4\n" + + "\n" + + "RunService\x12\x19Updates an existing task.*\vupdate_task\x82\xd3\xe4\x93\x02%:\x04task2\x1d/apis/v2beta1/tasks/{task_id}\x12\x82\x02\n" + + "\x0fUpdateTasksBulk\x12>.kubeflow.pipelines.backend.api.v2beta1.UpdateTasksBulkRequest\x1a?.kubeflow.pipelines.backend.api.v2beta1.UpdateTasksBulkResponse\"n\x92AA\n" + + "\n" + + "RunService\x12\x1fUpdates multiple tasks in bulk.*\x12batch_update_tasks\x82\xd3\xe4\x93\x02$:\x01*\"\x1f/apis/v2beta1/tasks:batchUpdate\x12\xda\x01\n" + + 
"\aGetTask\x126.kubeflow.pipelines.backend.api.v2beta1.GetTaskRequest\x1a:.kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail\"[\x92A3\n" + + "\n" + + "RunService\x12\x1bGets a specific task by ID.*\bget_task\x82\xd3\xe4\x93\x02\x1f\x12\x1d/apis/v2beta1/tasks/{task_id}\x12\xde\x01\n" + + "\tListTasks\x128.kubeflow.pipelines.backend.api.v2beta1.ListTasksRequest\x1a9.kubeflow.pipelines.backend.api.v2beta1.ListTasksResponse\"\\\x92A>\n" + + "\n" + + "RunService\x12$Lists tasks with optional filtering.*\n" + + "list_tasks\x82\xd3\xe4\x93\x02\x15\x12\x13/apis/v2beta1/tasksB\x98\x01\x92AX*\x02\x01\x02R#\n" + "\adefault\x12\x18\x12\x16\n" + "\x14\x1a\x12.google.rpc.StatusZ\x1f\n" + "\x1d\n" + @@ -1859,91 +3175,151 @@ func file_backend_api_v2beta1_run_proto_rawDescGZIP() []byte { return file_backend_api_v2beta1_run_proto_rawDescData } -var file_backend_api_v2beta1_run_proto_enumTypes = make([]protoimpl.EnumInfo, 2) -var file_backend_api_v2beta1_run_proto_msgTypes = make([]protoimpl.MessageInfo, 21) +var file_backend_api_v2beta1_run_proto_enumTypes = make([]protoimpl.EnumInfo, 7) +var file_backend_api_v2beta1_run_proto_msgTypes = make([]protoimpl.MessageInfo, 36) var file_backend_api_v2beta1_run_proto_goTypes = []any{ - (RuntimeState)(0), // 0: kubeflow.pipelines.backend.api.v2beta1.RuntimeState - (Run_StorageState)(0), // 1: kubeflow.pipelines.backend.api.v2beta1.Run.StorageState - (*Run)(nil), // 2: kubeflow.pipelines.backend.api.v2beta1.Run - (*PipelineVersionReference)(nil), // 3: kubeflow.pipelines.backend.api.v2beta1.PipelineVersionReference - (*RuntimeStatus)(nil), // 4: kubeflow.pipelines.backend.api.v2beta1.RuntimeStatus - (*RunDetails)(nil), // 5: kubeflow.pipelines.backend.api.v2beta1.RunDetails - (*PipelineTaskDetail)(nil), // 6: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail - (*PipelineTaskExecutorDetail)(nil), // 7: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskExecutorDetail - (*ArtifactList)(nil), // 8: kubeflow.pipelines.backend.api.v2beta1.ArtifactList - (*CreateRunRequest)(nil), // 9: kubeflow.pipelines.backend.api.v2beta1.CreateRunRequest - (*GetRunRequest)(nil), // 10: kubeflow.pipelines.backend.api.v2beta1.GetRunRequest - (*ListRunsRequest)(nil), // 11: kubeflow.pipelines.backend.api.v2beta1.ListRunsRequest - (*TerminateRunRequest)(nil), // 12: kubeflow.pipelines.backend.api.v2beta1.TerminateRunRequest - (*ListRunsResponse)(nil), // 13: kubeflow.pipelines.backend.api.v2beta1.ListRunsResponse - (*ArchiveRunRequest)(nil), // 14: kubeflow.pipelines.backend.api.v2beta1.ArchiveRunRequest - (*UnarchiveRunRequest)(nil), // 15: kubeflow.pipelines.backend.api.v2beta1.UnarchiveRunRequest - (*DeleteRunRequest)(nil), // 16: kubeflow.pipelines.backend.api.v2beta1.DeleteRunRequest - (*ReadArtifactRequest)(nil), // 17: kubeflow.pipelines.backend.api.v2beta1.ReadArtifactRequest - (*ReadArtifactResponse)(nil), // 18: kubeflow.pipelines.backend.api.v2beta1.ReadArtifactResponse - (*RetryRunRequest)(nil), // 19: kubeflow.pipelines.backend.api.v2beta1.RetryRunRequest - nil, // 20: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.InputsEntry - nil, // 21: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.OutputsEntry - (*PipelineTaskDetail_ChildTask)(nil), // 22: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.ChildTask - (*structpb.Struct)(nil), // 23: google.protobuf.Struct - (*RuntimeConfig)(nil), // 24: kubeflow.pipelines.backend.api.v2beta1.RuntimeConfig - (*timestamppb.Timestamp)(nil), // 25: google.protobuf.Timestamp - (*status.Status)(nil), 
// 26: google.rpc.Status - (*emptypb.Empty)(nil), // 27: google.protobuf.Empty + (RuntimeState)(0), // 0: kubeflow.pipelines.backend.api.v2beta1.RuntimeState + (Run_StorageState)(0), // 1: kubeflow.pipelines.backend.api.v2beta1.Run.StorageState + (PipelineTaskDetail_TaskPodType)(0), // 2: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.TaskPodType + (PipelineTaskDetail_TaskState)(0), // 3: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.TaskState + (PipelineTaskDetail_TaskType)(0), // 4: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.TaskType + (GetRunRequest_ViewMode)(0), // 5: kubeflow.pipelines.backend.api.v2beta1.GetRunRequest.ViewMode + (ListRunsRequest_ViewMode)(0), // 6: kubeflow.pipelines.backend.api.v2beta1.ListRunsRequest.ViewMode + (*Run)(nil), // 7: kubeflow.pipelines.backend.api.v2beta1.Run + (*PipelineVersionReference)(nil), // 8: kubeflow.pipelines.backend.api.v2beta1.PipelineVersionReference + (*RuntimeStatus)(nil), // 9: kubeflow.pipelines.backend.api.v2beta1.RuntimeStatus + (*RunDetails)(nil), // 10: kubeflow.pipelines.backend.api.v2beta1.RunDetails + (*PipelineTaskDetail)(nil), // 11: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail + (*PipelineTaskExecutorDetail)(nil), // 12: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskExecutorDetail + (*ArtifactList)(nil), // 13: kubeflow.pipelines.backend.api.v2beta1.ArtifactList + (*CreateRunRequest)(nil), // 14: kubeflow.pipelines.backend.api.v2beta1.CreateRunRequest + (*GetRunRequest)(nil), // 15: kubeflow.pipelines.backend.api.v2beta1.GetRunRequest + (*ListRunsRequest)(nil), // 16: kubeflow.pipelines.backend.api.v2beta1.ListRunsRequest + (*TerminateRunRequest)(nil), // 17: kubeflow.pipelines.backend.api.v2beta1.TerminateRunRequest + (*ListRunsResponse)(nil), // 18: kubeflow.pipelines.backend.api.v2beta1.ListRunsResponse + (*ArchiveRunRequest)(nil), // 19: kubeflow.pipelines.backend.api.v2beta1.ArchiveRunRequest + (*UnarchiveRunRequest)(nil), // 20: kubeflow.pipelines.backend.api.v2beta1.UnarchiveRunRequest + (*DeleteRunRequest)(nil), // 21: kubeflow.pipelines.backend.api.v2beta1.DeleteRunRequest + (*ReadArtifactRequest)(nil), // 22: kubeflow.pipelines.backend.api.v2beta1.ReadArtifactRequest + (*ReadArtifactResponse)(nil), // 23: kubeflow.pipelines.backend.api.v2beta1.ReadArtifactResponse + (*RetryRunRequest)(nil), // 24: kubeflow.pipelines.backend.api.v2beta1.RetryRunRequest + (*CreateTaskRequest)(nil), // 25: kubeflow.pipelines.backend.api.v2beta1.CreateTaskRequest + (*UpdateTaskRequest)(nil), // 26: kubeflow.pipelines.backend.api.v2beta1.UpdateTaskRequest + (*UpdateTasksBulkRequest)(nil), // 27: kubeflow.pipelines.backend.api.v2beta1.UpdateTasksBulkRequest + (*UpdateTasksBulkResponse)(nil), // 28: kubeflow.pipelines.backend.api.v2beta1.UpdateTasksBulkResponse + (*GetTaskRequest)(nil), // 29: kubeflow.pipelines.backend.api.v2beta1.GetTaskRequest + (*ListTasksRequest)(nil), // 30: kubeflow.pipelines.backend.api.v2beta1.ListTasksRequest + (*ListTasksResponse)(nil), // 31: kubeflow.pipelines.backend.api.v2beta1.ListTasksResponse + (*PipelineTaskDetail_TaskPod)(nil), // 32: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.TaskPod + (*PipelineTaskDetail_StatusMetadata)(nil), // 33: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.StatusMetadata + (*PipelineTaskDetail_TaskStatus)(nil), // 34: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.TaskStatus + (*PipelineTaskDetail_TypeAttributes)(nil), // 35: 
kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.TypeAttributes + (*PipelineTaskDetail_ChildTask)(nil), // 36: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.ChildTask + (*PipelineTaskDetail_InputOutputs)(nil), // 37: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.InputOutputs + nil, // 38: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.StatusMetadata.CustomPropertiesEntry + (*PipelineTaskDetail_InputOutputs_IOParameter)(nil), // 39: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.InputOutputs.IOParameter + (*PipelineTaskDetail_InputOutputs_IOArtifact)(nil), // 40: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.InputOutputs.IOArtifact + nil, // 41: kubeflow.pipelines.backend.api.v2beta1.UpdateTasksBulkRequest.TasksEntry + nil, // 42: kubeflow.pipelines.backend.api.v2beta1.UpdateTasksBulkResponse.TasksEntry + (*structpb.Struct)(nil), // 43: google.protobuf.Struct + (*RuntimeConfig)(nil), // 44: kubeflow.pipelines.backend.api.v2beta1.RuntimeConfig + (*timestamppb.Timestamp)(nil), // 45: google.protobuf.Timestamp + (*status.Status)(nil), // 46: google.rpc.Status + (*structpb.Value)(nil), // 47: google.protobuf.Value + (IOType)(0), // 48: kubeflow.pipelines.backend.api.v2beta1.IOType + (*IOProducer)(nil), // 49: kubeflow.pipelines.backend.api.v2beta1.IOProducer + (*Artifact)(nil), // 50: kubeflow.pipelines.backend.api.v2beta1.Artifact + (*emptypb.Empty)(nil), // 51: google.protobuf.Empty } var file_backend_api_v2beta1_run_proto_depIdxs = []int32{ 1, // 0: kubeflow.pipelines.backend.api.v2beta1.Run.storage_state:type_name -> kubeflow.pipelines.backend.api.v2beta1.Run.StorageState - 23, // 1: kubeflow.pipelines.backend.api.v2beta1.Run.pipeline_spec:type_name -> google.protobuf.Struct - 3, // 2: kubeflow.pipelines.backend.api.v2beta1.Run.pipeline_version_reference:type_name -> kubeflow.pipelines.backend.api.v2beta1.PipelineVersionReference - 24, // 3: kubeflow.pipelines.backend.api.v2beta1.Run.runtime_config:type_name -> kubeflow.pipelines.backend.api.v2beta1.RuntimeConfig - 25, // 4: kubeflow.pipelines.backend.api.v2beta1.Run.created_at:type_name -> google.protobuf.Timestamp - 25, // 5: kubeflow.pipelines.backend.api.v2beta1.Run.scheduled_at:type_name -> google.protobuf.Timestamp - 25, // 6: kubeflow.pipelines.backend.api.v2beta1.Run.finished_at:type_name -> google.protobuf.Timestamp + 43, // 1: kubeflow.pipelines.backend.api.v2beta1.Run.pipeline_spec:type_name -> google.protobuf.Struct + 8, // 2: kubeflow.pipelines.backend.api.v2beta1.Run.pipeline_version_reference:type_name -> kubeflow.pipelines.backend.api.v2beta1.PipelineVersionReference + 44, // 3: kubeflow.pipelines.backend.api.v2beta1.Run.runtime_config:type_name -> kubeflow.pipelines.backend.api.v2beta1.RuntimeConfig + 45, // 4: kubeflow.pipelines.backend.api.v2beta1.Run.created_at:type_name -> google.protobuf.Timestamp + 45, // 5: kubeflow.pipelines.backend.api.v2beta1.Run.scheduled_at:type_name -> google.protobuf.Timestamp + 45, // 6: kubeflow.pipelines.backend.api.v2beta1.Run.finished_at:type_name -> google.protobuf.Timestamp 0, // 7: kubeflow.pipelines.backend.api.v2beta1.Run.state:type_name -> kubeflow.pipelines.backend.api.v2beta1.RuntimeState - 26, // 8: kubeflow.pipelines.backend.api.v2beta1.Run.error:type_name -> google.rpc.Status - 5, // 9: kubeflow.pipelines.backend.api.v2beta1.Run.run_details:type_name -> kubeflow.pipelines.backend.api.v2beta1.RunDetails - 4, // 10: kubeflow.pipelines.backend.api.v2beta1.Run.state_history:type_name -> 
kubeflow.pipelines.backend.api.v2beta1.RuntimeStatus - 25, // 11: kubeflow.pipelines.backend.api.v2beta1.RuntimeStatus.update_time:type_name -> google.protobuf.Timestamp - 0, // 12: kubeflow.pipelines.backend.api.v2beta1.RuntimeStatus.state:type_name -> kubeflow.pipelines.backend.api.v2beta1.RuntimeState - 26, // 13: kubeflow.pipelines.backend.api.v2beta1.RuntimeStatus.error:type_name -> google.rpc.Status - 6, // 14: kubeflow.pipelines.backend.api.v2beta1.RunDetails.task_details:type_name -> kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail - 25, // 15: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.create_time:type_name -> google.protobuf.Timestamp - 25, // 16: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.start_time:type_name -> google.protobuf.Timestamp - 25, // 17: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.end_time:type_name -> google.protobuf.Timestamp - 7, // 18: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.executor_detail:type_name -> kubeflow.pipelines.backend.api.v2beta1.PipelineTaskExecutorDetail - 0, // 19: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.state:type_name -> kubeflow.pipelines.backend.api.v2beta1.RuntimeState - 26, // 20: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.error:type_name -> google.rpc.Status - 20, // 21: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.inputs:type_name -> kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.InputsEntry - 21, // 22: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.outputs:type_name -> kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.OutputsEntry - 4, // 23: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.state_history:type_name -> kubeflow.pipelines.backend.api.v2beta1.RuntimeStatus - 22, // 24: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.child_tasks:type_name -> kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.ChildTask - 2, // 25: kubeflow.pipelines.backend.api.v2beta1.CreateRunRequest.run:type_name -> kubeflow.pipelines.backend.api.v2beta1.Run - 2, // 26: kubeflow.pipelines.backend.api.v2beta1.ListRunsResponse.runs:type_name -> kubeflow.pipelines.backend.api.v2beta1.Run - 8, // 27: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.InputsEntry.value:type_name -> kubeflow.pipelines.backend.api.v2beta1.ArtifactList - 8, // 28: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.OutputsEntry.value:type_name -> kubeflow.pipelines.backend.api.v2beta1.ArtifactList - 9, // 29: kubeflow.pipelines.backend.api.v2beta1.RunService.CreateRun:input_type -> kubeflow.pipelines.backend.api.v2beta1.CreateRunRequest - 10, // 30: kubeflow.pipelines.backend.api.v2beta1.RunService.GetRun:input_type -> kubeflow.pipelines.backend.api.v2beta1.GetRunRequest - 11, // 31: kubeflow.pipelines.backend.api.v2beta1.RunService.ListRuns:input_type -> kubeflow.pipelines.backend.api.v2beta1.ListRunsRequest - 14, // 32: kubeflow.pipelines.backend.api.v2beta1.RunService.ArchiveRun:input_type -> kubeflow.pipelines.backend.api.v2beta1.ArchiveRunRequest - 15, // 33: kubeflow.pipelines.backend.api.v2beta1.RunService.UnarchiveRun:input_type -> kubeflow.pipelines.backend.api.v2beta1.UnarchiveRunRequest - 16, // 34: kubeflow.pipelines.backend.api.v2beta1.RunService.DeleteRun:input_type -> kubeflow.pipelines.backend.api.v2beta1.DeleteRunRequest - 17, // 35: kubeflow.pipelines.backend.api.v2beta1.RunService.ReadArtifact:input_type -> kubeflow.pipelines.backend.api.v2beta1.ReadArtifactRequest - 12, // 36: 
kubeflow.pipelines.backend.api.v2beta1.RunService.TerminateRun:input_type -> kubeflow.pipelines.backend.api.v2beta1.TerminateRunRequest - 19, // 37: kubeflow.pipelines.backend.api.v2beta1.RunService.RetryRun:input_type -> kubeflow.pipelines.backend.api.v2beta1.RetryRunRequest - 2, // 38: kubeflow.pipelines.backend.api.v2beta1.RunService.CreateRun:output_type -> kubeflow.pipelines.backend.api.v2beta1.Run - 2, // 39: kubeflow.pipelines.backend.api.v2beta1.RunService.GetRun:output_type -> kubeflow.pipelines.backend.api.v2beta1.Run - 13, // 40: kubeflow.pipelines.backend.api.v2beta1.RunService.ListRuns:output_type -> kubeflow.pipelines.backend.api.v2beta1.ListRunsResponse - 27, // 41: kubeflow.pipelines.backend.api.v2beta1.RunService.ArchiveRun:output_type -> google.protobuf.Empty - 27, // 42: kubeflow.pipelines.backend.api.v2beta1.RunService.UnarchiveRun:output_type -> google.protobuf.Empty - 27, // 43: kubeflow.pipelines.backend.api.v2beta1.RunService.DeleteRun:output_type -> google.protobuf.Empty - 18, // 44: kubeflow.pipelines.backend.api.v2beta1.RunService.ReadArtifact:output_type -> kubeflow.pipelines.backend.api.v2beta1.ReadArtifactResponse - 27, // 45: kubeflow.pipelines.backend.api.v2beta1.RunService.TerminateRun:output_type -> google.protobuf.Empty - 27, // 46: kubeflow.pipelines.backend.api.v2beta1.RunService.RetryRun:output_type -> google.protobuf.Empty - 38, // [38:47] is the sub-list for method output_type - 29, // [29:38] is the sub-list for method input_type - 29, // [29:29] is the sub-list for extension type_name - 29, // [29:29] is the sub-list for extension extendee - 0, // [0:29] is the sub-list for field type_name + 46, // 8: kubeflow.pipelines.backend.api.v2beta1.Run.error:type_name -> google.rpc.Status + 10, // 9: kubeflow.pipelines.backend.api.v2beta1.Run.run_details:type_name -> kubeflow.pipelines.backend.api.v2beta1.RunDetails + 9, // 10: kubeflow.pipelines.backend.api.v2beta1.Run.state_history:type_name -> kubeflow.pipelines.backend.api.v2beta1.RuntimeStatus + 8, // 11: kubeflow.pipelines.backend.api.v2beta1.Run.pipeline_reference:type_name -> kubeflow.pipelines.backend.api.v2beta1.PipelineVersionReference + 11, // 12: kubeflow.pipelines.backend.api.v2beta1.Run.tasks:type_name -> kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail + 45, // 13: kubeflow.pipelines.backend.api.v2beta1.RuntimeStatus.update_time:type_name -> google.protobuf.Timestamp + 0, // 14: kubeflow.pipelines.backend.api.v2beta1.RuntimeStatus.state:type_name -> kubeflow.pipelines.backend.api.v2beta1.RuntimeState + 46, // 15: kubeflow.pipelines.backend.api.v2beta1.RuntimeStatus.error:type_name -> google.rpc.Status + 11, // 16: kubeflow.pipelines.backend.api.v2beta1.RunDetails.task_details:type_name -> kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail + 32, // 17: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.pods:type_name -> kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.TaskPod + 45, // 18: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.create_time:type_name -> google.protobuf.Timestamp + 45, // 19: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.start_time:type_name -> google.protobuf.Timestamp + 45, // 20: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.end_time:type_name -> google.protobuf.Timestamp + 3, // 21: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.state:type_name -> kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.TaskState + 33, // 22: 
kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.status_metadata:type_name -> kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.StatusMetadata + 34, // 23: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.state_history:type_name -> kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.TaskStatus + 4, // 24: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.type:type_name -> kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.TaskType + 35, // 25: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.type_attributes:type_name -> kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.TypeAttributes + 46, // 26: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.error:type_name -> google.rpc.Status + 36, // 27: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.child_tasks:type_name -> kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.ChildTask + 37, // 28: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.inputs:type_name -> kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.InputOutputs + 37, // 29: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.outputs:type_name -> kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.InputOutputs + 7, // 30: kubeflow.pipelines.backend.api.v2beta1.CreateRunRequest.run:type_name -> kubeflow.pipelines.backend.api.v2beta1.Run + 5, // 31: kubeflow.pipelines.backend.api.v2beta1.GetRunRequest.view:type_name -> kubeflow.pipelines.backend.api.v2beta1.GetRunRequest.ViewMode + 6, // 32: kubeflow.pipelines.backend.api.v2beta1.ListRunsRequest.view:type_name -> kubeflow.pipelines.backend.api.v2beta1.ListRunsRequest.ViewMode + 7, // 33: kubeflow.pipelines.backend.api.v2beta1.ListRunsResponse.runs:type_name -> kubeflow.pipelines.backend.api.v2beta1.Run + 11, // 34: kubeflow.pipelines.backend.api.v2beta1.CreateTaskRequest.task:type_name -> kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail + 11, // 35: kubeflow.pipelines.backend.api.v2beta1.UpdateTaskRequest.task:type_name -> kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail + 41, // 36: kubeflow.pipelines.backend.api.v2beta1.UpdateTasksBulkRequest.tasks:type_name -> kubeflow.pipelines.backend.api.v2beta1.UpdateTasksBulkRequest.TasksEntry + 42, // 37: kubeflow.pipelines.backend.api.v2beta1.UpdateTasksBulkResponse.tasks:type_name -> kubeflow.pipelines.backend.api.v2beta1.UpdateTasksBulkResponse.TasksEntry + 11, // 38: kubeflow.pipelines.backend.api.v2beta1.ListTasksResponse.tasks:type_name -> kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail + 2, // 39: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.TaskPod.type:type_name -> kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.TaskPodType + 38, // 40: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.StatusMetadata.custom_properties:type_name -> kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.StatusMetadata.CustomPropertiesEntry + 45, // 41: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.TaskStatus.update_time:type_name -> google.protobuf.Timestamp + 3, // 42: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.TaskStatus.state:type_name -> kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.TaskState + 46, // 43: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.TaskStatus.error:type_name -> google.rpc.Status + 39, // 44: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.InputOutputs.parameters:type_name -> 
kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.InputOutputs.IOParameter + 40, // 45: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.InputOutputs.artifacts:type_name -> kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.InputOutputs.IOArtifact + 47, // 46: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.StatusMetadata.CustomPropertiesEntry.value:type_name -> google.protobuf.Value + 47, // 47: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.InputOutputs.IOParameter.value:type_name -> google.protobuf.Value + 48, // 48: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.InputOutputs.IOParameter.type:type_name -> kubeflow.pipelines.backend.api.v2beta1.IOType + 49, // 49: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.InputOutputs.IOParameter.producer:type_name -> kubeflow.pipelines.backend.api.v2beta1.IOProducer + 50, // 50: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.InputOutputs.IOArtifact.artifacts:type_name -> kubeflow.pipelines.backend.api.v2beta1.Artifact + 48, // 51: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.InputOutputs.IOArtifact.type:type_name -> kubeflow.pipelines.backend.api.v2beta1.IOType + 49, // 52: kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail.InputOutputs.IOArtifact.producer:type_name -> kubeflow.pipelines.backend.api.v2beta1.IOProducer + 11, // 53: kubeflow.pipelines.backend.api.v2beta1.UpdateTasksBulkRequest.TasksEntry.value:type_name -> kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail + 11, // 54: kubeflow.pipelines.backend.api.v2beta1.UpdateTasksBulkResponse.TasksEntry.value:type_name -> kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail + 14, // 55: kubeflow.pipelines.backend.api.v2beta1.RunService.CreateRun:input_type -> kubeflow.pipelines.backend.api.v2beta1.CreateRunRequest + 15, // 56: kubeflow.pipelines.backend.api.v2beta1.RunService.GetRun:input_type -> kubeflow.pipelines.backend.api.v2beta1.GetRunRequest + 16, // 57: kubeflow.pipelines.backend.api.v2beta1.RunService.ListRuns:input_type -> kubeflow.pipelines.backend.api.v2beta1.ListRunsRequest + 19, // 58: kubeflow.pipelines.backend.api.v2beta1.RunService.ArchiveRun:input_type -> kubeflow.pipelines.backend.api.v2beta1.ArchiveRunRequest + 20, // 59: kubeflow.pipelines.backend.api.v2beta1.RunService.UnarchiveRun:input_type -> kubeflow.pipelines.backend.api.v2beta1.UnarchiveRunRequest + 21, // 60: kubeflow.pipelines.backend.api.v2beta1.RunService.DeleteRun:input_type -> kubeflow.pipelines.backend.api.v2beta1.DeleteRunRequest + 22, // 61: kubeflow.pipelines.backend.api.v2beta1.RunService.ReadArtifact:input_type -> kubeflow.pipelines.backend.api.v2beta1.ReadArtifactRequest + 17, // 62: kubeflow.pipelines.backend.api.v2beta1.RunService.TerminateRun:input_type -> kubeflow.pipelines.backend.api.v2beta1.TerminateRunRequest + 24, // 63: kubeflow.pipelines.backend.api.v2beta1.RunService.RetryRun:input_type -> kubeflow.pipelines.backend.api.v2beta1.RetryRunRequest + 25, // 64: kubeflow.pipelines.backend.api.v2beta1.RunService.CreateTask:input_type -> kubeflow.pipelines.backend.api.v2beta1.CreateTaskRequest + 26, // 65: kubeflow.pipelines.backend.api.v2beta1.RunService.UpdateTask:input_type -> kubeflow.pipelines.backend.api.v2beta1.UpdateTaskRequest + 27, // 66: kubeflow.pipelines.backend.api.v2beta1.RunService.UpdateTasksBulk:input_type -> kubeflow.pipelines.backend.api.v2beta1.UpdateTasksBulkRequest + 29, // 67: kubeflow.pipelines.backend.api.v2beta1.RunService.GetTask:input_type -> 
kubeflow.pipelines.backend.api.v2beta1.GetTaskRequest + 30, // 68: kubeflow.pipelines.backend.api.v2beta1.RunService.ListTasks:input_type -> kubeflow.pipelines.backend.api.v2beta1.ListTasksRequest + 7, // 69: kubeflow.pipelines.backend.api.v2beta1.RunService.CreateRun:output_type -> kubeflow.pipelines.backend.api.v2beta1.Run + 7, // 70: kubeflow.pipelines.backend.api.v2beta1.RunService.GetRun:output_type -> kubeflow.pipelines.backend.api.v2beta1.Run + 18, // 71: kubeflow.pipelines.backend.api.v2beta1.RunService.ListRuns:output_type -> kubeflow.pipelines.backend.api.v2beta1.ListRunsResponse + 51, // 72: kubeflow.pipelines.backend.api.v2beta1.RunService.ArchiveRun:output_type -> google.protobuf.Empty + 51, // 73: kubeflow.pipelines.backend.api.v2beta1.RunService.UnarchiveRun:output_type -> google.protobuf.Empty + 51, // 74: kubeflow.pipelines.backend.api.v2beta1.RunService.DeleteRun:output_type -> google.protobuf.Empty + 23, // 75: kubeflow.pipelines.backend.api.v2beta1.RunService.ReadArtifact:output_type -> kubeflow.pipelines.backend.api.v2beta1.ReadArtifactResponse + 51, // 76: kubeflow.pipelines.backend.api.v2beta1.RunService.TerminateRun:output_type -> google.protobuf.Empty + 51, // 77: kubeflow.pipelines.backend.api.v2beta1.RunService.RetryRun:output_type -> google.protobuf.Empty + 11, // 78: kubeflow.pipelines.backend.api.v2beta1.RunService.CreateTask:output_type -> kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail + 11, // 79: kubeflow.pipelines.backend.api.v2beta1.RunService.UpdateTask:output_type -> kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail + 28, // 80: kubeflow.pipelines.backend.api.v2beta1.RunService.UpdateTasksBulk:output_type -> kubeflow.pipelines.backend.api.v2beta1.UpdateTasksBulkResponse + 11, // 81: kubeflow.pipelines.backend.api.v2beta1.RunService.GetTask:output_type -> kubeflow.pipelines.backend.api.v2beta1.PipelineTaskDetail + 31, // 82: kubeflow.pipelines.backend.api.v2beta1.RunService.ListTasks:output_type -> kubeflow.pipelines.backend.api.v2beta1.ListTasksResponse + 69, // [69:83] is the sub-list for method output_type + 55, // [55:69] is the sub-list for method input_type + 55, // [55:55] is the sub-list for extension type_name + 55, // [55:55] is the sub-list for extension extendee + 0, // [0:55] is the sub-list for field type_name } func init() { file_backend_api_v2beta1_run_proto_init() } @@ -1952,22 +3328,29 @@ func file_backend_api_v2beta1_run_proto_init() { return } file_backend_api_v2beta1_runtime_config_proto_init() + file_backend_api_v2beta1_artifact_proto_init() file_backend_api_v2beta1_run_proto_msgTypes[0].OneofWrappers = []any{ (*Run_PipelineVersionId)(nil), (*Run_PipelineSpec)(nil), (*Run_PipelineVersionReference)(nil), } - file_backend_api_v2beta1_run_proto_msgTypes[20].OneofWrappers = []any{ - (*PipelineTaskDetail_ChildTask_TaskId)(nil), - (*PipelineTaskDetail_ChildTask_PodName)(nil), + file_backend_api_v2beta1_run_proto_msgTypes[4].OneofWrappers = []any{} + file_backend_api_v2beta1_run_proto_msgTypes[8].OneofWrappers = []any{} + file_backend_api_v2beta1_run_proto_msgTypes[9].OneofWrappers = []any{} + file_backend_api_v2beta1_run_proto_msgTypes[23].OneofWrappers = []any{ + (*ListTasksRequest_ParentId)(nil), + (*ListTasksRequest_RunId)(nil), + (*ListTasksRequest_Namespace)(nil), } + file_backend_api_v2beta1_run_proto_msgTypes[28].OneofWrappers = []any{} + file_backend_api_v2beta1_run_proto_msgTypes[32].OneofWrappers = []any{} type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: 
reflect.TypeOf(x{}).PkgPath(), RawDescriptor: unsafe.Slice(unsafe.StringData(file_backend_api_v2beta1_run_proto_rawDesc), len(file_backend_api_v2beta1_run_proto_rawDesc)), - NumEnums: 2, - NumMessages: 21, + NumEnums: 7, + NumMessages: 36, NumExtensions: 0, NumServices: 1, }, diff --git a/backend/api/v2beta1/go_client/run.pb.gw.go b/backend/api/v2beta1/go_client/run.pb.gw.go index 47787f27e2e..bf212a60696 100644 --- a/backend/api/v2beta1/go_client/run.pb.gw.go +++ b/backend/api/v2beta1/go_client/run.pb.gw.go @@ -514,6 +514,179 @@ func local_request_RunService_RetryRun_0(ctx context.Context, marshaler runtime. return msg, metadata, err } +func request_RunService_CreateTask_0(ctx context.Context, marshaler runtime.Marshaler, client RunServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq CreateTaskRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Task); err != nil && !errors.Is(err, io.EOF) { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + msg, err := client.CreateTask(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err +} + +func local_request_RunService_CreateTask_0(ctx context.Context, marshaler runtime.Marshaler, server RunServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq CreateTaskRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Task); err != nil && !errors.Is(err, io.EOF) { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + msg, err := server.CreateTask(ctx, &protoReq) + return msg, metadata, err +} + +func request_RunService_UpdateTask_0(ctx context.Context, marshaler runtime.Marshaler, client RunServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq UpdateTaskRequest + metadata runtime.ServerMetadata + err error + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Task); err != nil && !errors.Is(err, io.EOF) { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["task_id"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "task_id") + } + protoReq.TaskId, err = runtime.String(val) + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "task_id", err) + } + msg, err := client.UpdateTask(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err +} + +func local_request_RunService_UpdateTask_0(ctx context.Context, marshaler runtime.Marshaler, server RunServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq UpdateTaskRequest + metadata runtime.ServerMetadata + err error + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq.Task); err != nil && !errors.Is(err, io.EOF) { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + val, ok := pathParams["task_id"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "task_id") + } + 
protoReq.TaskId, err = runtime.String(val) + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "task_id", err) + } + msg, err := server.UpdateTask(ctx, &protoReq) + return msg, metadata, err +} + +func request_RunService_UpdateTasksBulk_0(ctx context.Context, marshaler runtime.Marshaler, client RunServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq UpdateTasksBulkRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil && !errors.Is(err, io.EOF) { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + msg, err := client.UpdateTasksBulk(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err +} + +func local_request_RunService_UpdateTasksBulk_0(ctx context.Context, marshaler runtime.Marshaler, server RunServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq UpdateTasksBulkRequest + metadata runtime.ServerMetadata + ) + if err := marshaler.NewDecoder(req.Body).Decode(&protoReq); err != nil && !errors.Is(err, io.EOF) { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + msg, err := server.UpdateTasksBulk(ctx, &protoReq) + return msg, metadata, err +} + +func request_RunService_GetTask_0(ctx context.Context, marshaler runtime.Marshaler, client RunServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq GetTaskRequest + metadata runtime.ServerMetadata + err error + ) + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + val, ok := pathParams["task_id"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "task_id") + } + protoReq.TaskId, err = runtime.String(val) + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "task_id", err) + } + msg, err := client.GetTask(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err +} + +func local_request_RunService_GetTask_0(ctx context.Context, marshaler runtime.Marshaler, server RunServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq GetTaskRequest + metadata runtime.ServerMetadata + err error + ) + val, ok := pathParams["task_id"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "task_id") + } + protoReq.TaskId, err = runtime.String(val) + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "task_id", err) + } + msg, err := server.GetTask(ctx, &protoReq) + return msg, metadata, err +} + +var filter_RunService_ListTasks_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} + +func request_RunService_ListTasks_0(ctx context.Context, marshaler runtime.Marshaler, client RunServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq ListTasksRequest + metadata runtime.ServerMetadata + ) + if req.Body != nil { + _, _ = io.Copy(io.Discard, req.Body) + } + if 
err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_RunService_ListTasks_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + msg, err := client.ListTasks(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err +} + +func local_request_RunService_ListTasks_0(ctx context.Context, marshaler runtime.Marshaler, server RunServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var ( + protoReq ListTasksRequest + metadata runtime.ServerMetadata + ) + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_RunService_ListTasks_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + msg, err := server.ListTasks(ctx, &protoReq) + return msg, metadata, err +} + // RegisterRunServiceHandlerServer registers the http handlers for service RunService to "mux". // UnaryRPC :call RunServiceServer directly. // StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906. @@ -700,6 +873,106 @@ func RegisterRunServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, } forward_RunService_RetryRun_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) }) + mux.Handle(http.MethodPost, pattern_RunService_CreateTask_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + var stream runtime.ServerTransportStream + ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.RunService/CreateTask", runtime.WithHTTPPathPattern("/apis/v2beta1/tasks")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := local_request_RunService_CreateTask_0(annotatedContext, inboundMarshaler, server, req, pathParams) + md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + forward_RunService_CreateTask_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
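Reviewer note on the new CreateTask route registered above: the generated handler decodes the HTTP request body directly into protoReq.Task, so the REST payload is the PipelineTaskDetail JSON itself, not a wrapped {"task": ...} object. A minimal sketch of what a caller might send; the host, port, and field values here are illustrative assumptions, not taken from this PR:

package main

import (
    "bytes"
    "fmt"
    "net/http"
)

func main() {
    // Hypothetical gateway address; adjust for your deployment.
    url := "http://localhost:8888/apis/v2beta1/tasks"
    // The body is the task object itself (it is decoded into protoReq.Task).
    // run_id and name are assumed field names for illustration.
    body := []byte(`{"run_id": "some-run-id", "name": "train-step"}`)
    resp, err := http.Post(url, "application/json", bytes.NewReader(body))
    if err != nil {
        panic(err)
    }
    defer resp.Body.Close()
    fmt.Println("status:", resp.Status)
}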
+ }) + mux.Handle(http.MethodPatch, pattern_RunService_UpdateTask_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + var stream runtime.ServerTransportStream + ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.RunService/UpdateTask", runtime.WithHTTPPathPattern("/apis/v2beta1/tasks/{task_id}")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := local_request_RunService_UpdateTask_0(annotatedContext, inboundMarshaler, server, req, pathParams) + md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + forward_RunService_UpdateTask_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + }) + mux.Handle(http.MethodPost, pattern_RunService_UpdateTasksBulk_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + var stream runtime.ServerTransportStream + ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.RunService/UpdateTasksBulk", runtime.WithHTTPPathPattern("/apis/v2beta1/tasks:batchUpdate")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := local_request_RunService_UpdateTasksBulk_0(annotatedContext, inboundMarshaler, server, req, pathParams) + md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + forward_RunService_UpdateTasksBulk_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
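The bulk endpoint registered above uses the AIP-style custom verb /apis/v2beta1/tasks:batchUpdate, and, per the TasksEntry rows in the descriptor table earlier in this diff, UpdateTasksBulkRequest carries a map keyed by task ID with PipelineTaskDetail values. A rough sketch of building such a request with the generated types (package name and the empty task body are placeholders):

package tasksketch

import (
    "context"

    go_client "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client"
)

// bulkUpdate sketches the request shape: a map from task ID to the
// details to apply; per-task fields are omitted here and are merged
// server-side under the same semantics documented for UpdateTask.
func bulkUpdate(ctx context.Context, rc go_client.RunServiceClient, taskID string) error {
    req := &go_client.UpdateTasksBulkRequest{
        Tasks: map[string]*go_client.PipelineTaskDetail{
            taskID: {},
        },
    }
    _, err := rc.UpdateTasksBulk(ctx, req)
    return err
}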
+ }) + mux.Handle(http.MethodGet, pattern_RunService_GetTask_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + var stream runtime.ServerTransportStream + ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.RunService/GetTask", runtime.WithHTTPPathPattern("/apis/v2beta1/tasks/{task_id}")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := local_request_RunService_GetTask_0(annotatedContext, inboundMarshaler, server, req, pathParams) + md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + forward_RunService_GetTask_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + }) + mux.Handle(http.MethodGet, pattern_RunService_ListTasks_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + var stream runtime.ServerTransportStream + ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.RunService/ListTasks", runtime.WithHTTPPathPattern("/apis/v2beta1/tasks")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := local_request_RunService_ListTasks_0(annotatedContext, inboundMarshaler, server, req, pathParams) + md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + forward_RunService_ListTasks_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + }) return nil } @@ -893,29 +1166,124 @@ func RegisterRunServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, } forward_RunService_RetryRun_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) + mux.Handle(http.MethodPost, pattern_RunService_CreateTask_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.RunService/CreateTask", runtime.WithHTTPPathPattern("/apis/v2beta1/tasks")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_RunService_CreateTask_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + forward_RunService_CreateTask_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + }) + mux.Handle(http.MethodPatch, pattern_RunService_UpdateTask_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.RunService/UpdateTask", runtime.WithHTTPPathPattern("/apis/v2beta1/tasks/{task_id}")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_RunService_UpdateTask_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + forward_RunService_UpdateTask_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + }) + mux.Handle(http.MethodPost, pattern_RunService_UpdateTasksBulk_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.RunService/UpdateTasksBulk", runtime.WithHTTPPathPattern("/apis/v2beta1/tasks:batchUpdate")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_RunService_UpdateTasksBulk_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + forward_RunService_UpdateTasksBulk_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ }) + mux.Handle(http.MethodGet, pattern_RunService_GetTask_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.RunService/GetTask", runtime.WithHTTPPathPattern("/apis/v2beta1/tasks/{task_id}")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_RunService_GetTask_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + forward_RunService_GetTask_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + }) + mux.Handle(http.MethodGet, pattern_RunService_ListTasks_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/kubeflow.pipelines.backend.api.v2beta1.RunService/ListTasks", runtime.WithHTTPPathPattern("/apis/v2beta1/tasks")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_RunService_ListTasks_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + forward_RunService_ListTasks_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
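ListTasks takes no request body; the handler populates ListTasksRequest from query parameters, and the request's oneof (ParentId/RunId/Namespace, per the OneofWrappers block earlier in this diff) scopes the listing. So the REST form is a plain GET with a filter query. A hedged example, assuming the host shown and the usual snake_case JSON mapping for the query name:

package main

import (
    "fmt"
    "io"
    "net/http"
    "net/url"
)

func main() {
    // run_id is assumed to be the query spelling of the
    // ListTasksRequest.run_id oneof field.
    q := url.Values{"run_id": {"some-run-id"}}
    resp, err := http.Get("http://localhost:8888/apis/v2beta1/tasks?" + q.Encode())
    if err != nil {
        panic(err)
    }
    defer resp.Body.Close()
    b, _ := io.ReadAll(resp.Body)
    fmt.Println(string(b))
}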
+ }) return nil } var ( - pattern_RunService_CreateRun_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v2beta1", "runs"}, "")) - pattern_RunService_GetRun_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "runs", "run_id"}, "")) - pattern_RunService_ListRuns_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v2beta1", "runs"}, "")) - pattern_RunService_ArchiveRun_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "runs", "run_id"}, "archive")) - pattern_RunService_UnarchiveRun_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "runs", "run_id"}, "unarchive")) - pattern_RunService_DeleteRun_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "runs", "run_id"}, "")) - pattern_RunService_ReadArtifact_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 2, 4, 1, 0, 4, 1, 5, 5, 2, 6, 1, 0, 4, 1, 5, 7}, []string{"apis", "v2beta1", "runs", "run_id", "nodes", "node_id", "artifacts", "artifact_name"}, "read")) - pattern_RunService_TerminateRun_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "runs", "run_id"}, "terminate")) - pattern_RunService_RetryRun_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "runs", "run_id"}, "retry")) + pattern_RunService_CreateRun_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v2beta1", "runs"}, "")) + pattern_RunService_GetRun_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "runs", "run_id"}, "")) + pattern_RunService_ListRuns_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v2beta1", "runs"}, "")) + pattern_RunService_ArchiveRun_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "runs", "run_id"}, "archive")) + pattern_RunService_UnarchiveRun_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "runs", "run_id"}, "unarchive")) + pattern_RunService_DeleteRun_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "runs", "run_id"}, "")) + pattern_RunService_ReadArtifact_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 2, 4, 1, 0, 4, 1, 5, 5, 2, 6, 1, 0, 4, 1, 5, 7}, []string{"apis", "v2beta1", "runs", "run_id", "nodes", "node_id", "artifacts", "artifact_name"}, "read")) + pattern_RunService_TerminateRun_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "runs", "run_id"}, "terminate")) + pattern_RunService_RetryRun_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "runs", "run_id"}, "retry")) + pattern_RunService_CreateTask_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v2beta1", "tasks"}, "")) + pattern_RunService_UpdateTask_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, 
[]string{"apis", "v2beta1", "tasks", "task_id"}, "")) + pattern_RunService_UpdateTasksBulk_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v2beta1", "tasks"}, "batchUpdate")) + pattern_RunService_GetTask_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v2beta1", "tasks", "task_id"}, "")) + pattern_RunService_ListTasks_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v2beta1", "tasks"}, "")) ) var ( - forward_RunService_CreateRun_0 = runtime.ForwardResponseMessage - forward_RunService_GetRun_0 = runtime.ForwardResponseMessage - forward_RunService_ListRuns_0 = runtime.ForwardResponseMessage - forward_RunService_ArchiveRun_0 = runtime.ForwardResponseMessage - forward_RunService_UnarchiveRun_0 = runtime.ForwardResponseMessage - forward_RunService_DeleteRun_0 = runtime.ForwardResponseMessage - forward_RunService_ReadArtifact_0 = runtime.ForwardResponseMessage - forward_RunService_TerminateRun_0 = runtime.ForwardResponseMessage - forward_RunService_RetryRun_0 = runtime.ForwardResponseMessage + forward_RunService_CreateRun_0 = runtime.ForwardResponseMessage + forward_RunService_GetRun_0 = runtime.ForwardResponseMessage + forward_RunService_ListRuns_0 = runtime.ForwardResponseMessage + forward_RunService_ArchiveRun_0 = runtime.ForwardResponseMessage + forward_RunService_UnarchiveRun_0 = runtime.ForwardResponseMessage + forward_RunService_DeleteRun_0 = runtime.ForwardResponseMessage + forward_RunService_ReadArtifact_0 = runtime.ForwardResponseMessage + forward_RunService_TerminateRun_0 = runtime.ForwardResponseMessage + forward_RunService_RetryRun_0 = runtime.ForwardResponseMessage + forward_RunService_CreateTask_0 = runtime.ForwardResponseMessage + forward_RunService_UpdateTask_0 = runtime.ForwardResponseMessage + forward_RunService_UpdateTasksBulk_0 = runtime.ForwardResponseMessage + forward_RunService_GetTask_0 = runtime.ForwardResponseMessage + forward_RunService_ListTasks_0 = runtime.ForwardResponseMessage ) diff --git a/backend/api/v2beta1/go_client/run_grpc.pb.go b/backend/api/v2beta1/go_client/run_grpc.pb.go index 334fb5bf735..0001d02e5ea 100644 --- a/backend/api/v2beta1/go_client/run_grpc.pb.go +++ b/backend/api/v2beta1/go_client/run_grpc.pb.go @@ -34,15 +34,20 @@ import ( const _ = grpc.SupportPackageIsVersion9 const ( - RunService_CreateRun_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.RunService/CreateRun" - RunService_GetRun_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.RunService/GetRun" - RunService_ListRuns_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.RunService/ListRuns" - RunService_ArchiveRun_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.RunService/ArchiveRun" - RunService_UnarchiveRun_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.RunService/UnarchiveRun" - RunService_DeleteRun_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.RunService/DeleteRun" - RunService_ReadArtifact_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.RunService/ReadArtifact" - RunService_TerminateRun_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.RunService/TerminateRun" - RunService_RetryRun_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.RunService/RetryRun" + RunService_CreateRun_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.RunService/CreateRun" + RunService_GetRun_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.RunService/GetRun" + 
RunService_ListRuns_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.RunService/ListRuns" + RunService_ArchiveRun_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.RunService/ArchiveRun" + RunService_UnarchiveRun_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.RunService/UnarchiveRun" + RunService_DeleteRun_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.RunService/DeleteRun" + RunService_ReadArtifact_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.RunService/ReadArtifact" + RunService_TerminateRun_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.RunService/TerminateRun" + RunService_RetryRun_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.RunService/RetryRun" + RunService_CreateTask_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.RunService/CreateTask" + RunService_UpdateTask_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.RunService/UpdateTask" + RunService_UpdateTasksBulk_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.RunService/UpdateTasksBulk" + RunService_GetTask_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.RunService/GetTask" + RunService_ListTasks_FullMethodName = "/kubeflow.pipelines.backend.api.v2beta1.RunService/ListTasks" ) // RunServiceClient is the client API for RunService service. @@ -69,6 +74,17 @@ type RunServiceClient interface { TerminateRun(ctx context.Context, in *TerminateRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) // Re-initiates a failed or terminated run. RetryRun(ctx context.Context, in *RetryRunRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) + CreateTask(ctx context.Context, in *CreateTaskRequest, opts ...grpc.CallOption) (*PipelineTaskDetail, error) + // Update the task with the provided task details. + // Update Task uses merge semantics for Parameters and does not + // over-write them. This is to accommodate driver/launcher usage + // for asynchronous writes to the same task (e.g. during + // back propagation). Merging parameters avoids encountering + // race conditions. + UpdateTask(ctx context.Context, in *UpdateTaskRequest, opts ...grpc.CallOption) (*PipelineTaskDetail, error) + UpdateTasksBulk(ctx context.Context, in *UpdateTasksBulkRequest, opts ...grpc.CallOption) (*UpdateTasksBulkResponse, error) + GetTask(ctx context.Context, in *GetTaskRequest, opts ...grpc.CallOption) (*PipelineTaskDetail, error) + ListTasks(ctx context.Context, in *ListTasksRequest, opts ...grpc.CallOption) (*ListTasksResponse, error) } type runServiceClient struct { @@ -169,6 +185,56 @@ func (c *runServiceClient) RetryRun(ctx context.Context, in *RetryRunRequest, op return out, nil } +func (c *runServiceClient) CreateTask(ctx context.Context, in *CreateTaskRequest, opts ...grpc.CallOption) (*PipelineTaskDetail, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(PipelineTaskDetail) + err := c.cc.Invoke(ctx, RunService_CreateTask_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *runServiceClient) UpdateTask(ctx context.Context, in *UpdateTaskRequest, opts ...grpc.CallOption) (*PipelineTaskDetail, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(PipelineTaskDetail) + err := c.cc.Invoke(ctx, RunService_UpdateTask_FullMethodName, in, out, cOpts...) 
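For reviewers who want to see the new client surface in use: a minimal sketch of calling UpdateTask over gRPC. It assumes the standard generated NewRunServiceClient constructor, grpc-go v1.63+ (for grpc.NewClient), and an insecure local connection; per the merge-semantics comment quoted above, repeated updates to the same task accumulate parameters rather than overwrite them.

package main

import (
    "context"
    "log"

    "google.golang.org/grpc"
    "google.golang.org/grpc/credentials/insecure"

    // Assumed import path for the generated package in this repo.
    go_client "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client"
)

func main() {
    conn, err := grpc.NewClient("localhost:8887",
        grpc.WithTransportCredentials(insecure.NewCredentials()))
    if err != nil {
        log.Fatal(err)
    }
    defer conn.Close()

    rc := go_client.NewRunServiceClient(conn)
    // In the REST mapping TaskId comes from the URL path; over gRPC it
    // is set directly on the request.
    task, err := rc.UpdateTask(context.Background(), &go_client.UpdateTaskRequest{
        TaskId: "some-task-id",
        Task:   &go_client.PipelineTaskDetail{},
    })
    if err != nil {
        log.Fatal(err)
    }
    log.Printf("updated task state: %v", task.GetState())
}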
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *runServiceClient) UpdateTasksBulk(ctx context.Context, in *UpdateTasksBulkRequest, opts ...grpc.CallOption) (*UpdateTasksBulkResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(UpdateTasksBulkResponse) + err := c.cc.Invoke(ctx, RunService_UpdateTasksBulk_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *runServiceClient) GetTask(ctx context.Context, in *GetTaskRequest, opts ...grpc.CallOption) (*PipelineTaskDetail, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(PipelineTaskDetail) + err := c.cc.Invoke(ctx, RunService_GetTask_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *runServiceClient) ListTasks(ctx context.Context, in *ListTasksRequest, opts ...grpc.CallOption) (*ListTasksResponse, error) { + cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) + out := new(ListTasksResponse) + err := c.cc.Invoke(ctx, RunService_ListTasks_FullMethodName, in, out, cOpts...) + if err != nil { + return nil, err + } + return out, nil +} + // RunServiceServer is the server API for RunService service. // All implementations must embed UnimplementedRunServiceServer // for forward compatibility. @@ -193,6 +259,17 @@ type RunServiceServer interface { TerminateRun(context.Context, *TerminateRunRequest) (*emptypb.Empty, error) // Re-initiates a failed or terminated run. RetryRun(context.Context, *RetryRunRequest) (*emptypb.Empty, error) + CreateTask(context.Context, *CreateTaskRequest) (*PipelineTaskDetail, error) + // Update the task with the provided task details. + // Update Task uses merge semantics for Parameters and does not + // over-write them. This is to accommodate driver/launcher usage + // for asynchronous writes to the same task (e.g. during + // back propagation). Merging parameters avoids encountering + // race conditions. 
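On the server side, forward compatibility is preserved by embedding UnimplementedRunServiceServer (added below), which stubs each of the five new task methods with codes.Unimplemented. A sketch of a partial implementation that overrides just one of them:

package main

import (
    "context"

    go_client "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client"
)

// taskServer implements GetTask and inherits Unimplemented stubs for
// the remaining task methods.
type taskServer struct {
    go_client.UnimplementedRunServiceServer
}

func (s *taskServer) GetTask(ctx context.Context, req *go_client.GetTaskRequest) (*go_client.PipelineTaskDetail, error) {
    // A real implementation would look up req.GetTaskId() in storage;
    // this placeholder returns an empty detail.
    return &go_client.PipelineTaskDetail{}, nil
}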
+ UpdateTask(context.Context, *UpdateTaskRequest) (*PipelineTaskDetail, error) + UpdateTasksBulk(context.Context, *UpdateTasksBulkRequest) (*UpdateTasksBulkResponse, error) + GetTask(context.Context, *GetTaskRequest) (*PipelineTaskDetail, error) + ListTasks(context.Context, *ListTasksRequest) (*ListTasksResponse, error) mustEmbedUnimplementedRunServiceServer() } @@ -230,6 +307,21 @@ func (UnimplementedRunServiceServer) TerminateRun(context.Context, *TerminateRun func (UnimplementedRunServiceServer) RetryRun(context.Context, *RetryRunRequest) (*emptypb.Empty, error) { return nil, status.Errorf(codes.Unimplemented, "method RetryRun not implemented") } +func (UnimplementedRunServiceServer) CreateTask(context.Context, *CreateTaskRequest) (*PipelineTaskDetail, error) { + return nil, status.Errorf(codes.Unimplemented, "method CreateTask not implemented") +} +func (UnimplementedRunServiceServer) UpdateTask(context.Context, *UpdateTaskRequest) (*PipelineTaskDetail, error) { + return nil, status.Errorf(codes.Unimplemented, "method UpdateTask not implemented") +} +func (UnimplementedRunServiceServer) UpdateTasksBulk(context.Context, *UpdateTasksBulkRequest) (*UpdateTasksBulkResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method UpdateTasksBulk not implemented") +} +func (UnimplementedRunServiceServer) GetTask(context.Context, *GetTaskRequest) (*PipelineTaskDetail, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetTask not implemented") +} +func (UnimplementedRunServiceServer) ListTasks(context.Context, *ListTasksRequest) (*ListTasksResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ListTasks not implemented") +} func (UnimplementedRunServiceServer) mustEmbedUnimplementedRunServiceServer() {} func (UnimplementedRunServiceServer) testEmbeddedByValue() {} @@ -413,6 +505,96 @@ func _RunService_RetryRun_Handler(srv interface{}, ctx context.Context, dec func return interceptor(ctx, in, info, handler) } +func _RunService_CreateTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateTaskRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RunServiceServer).CreateTask(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: RunService_CreateTask_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RunServiceServer).CreateTask(ctx, req.(*CreateTaskRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RunService_UpdateTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateTaskRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RunServiceServer).UpdateTask(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: RunService_UpdateTask_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RunServiceServer).UpdateTask(ctx, req.(*UpdateTaskRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RunService_UpdateTasksBulk_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateTasksBulkRequest) + if err := dec(in); err != nil { + return nil, err + } + if 
interceptor == nil { + return srv.(RunServiceServer).UpdateTasksBulk(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: RunService_UpdateTasksBulk_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RunServiceServer).UpdateTasksBulk(ctx, req.(*UpdateTasksBulkRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RunService_GetTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetTaskRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RunServiceServer).GetTask(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: RunService_GetTask_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RunServiceServer).GetTask(ctx, req.(*GetTaskRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RunService_ListTasks_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListTasksRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RunServiceServer).ListTasks(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: RunService_ListTasks_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RunServiceServer).ListTasks(ctx, req.(*ListTasksRequest)) + } + return interceptor(ctx, in, info, handler) +} + // RunService_ServiceDesc is the grpc.ServiceDesc for RunService service. // It's only intended for direct use with grpc.RegisterService, // and not to be introspected or modified (even as a copy) @@ -456,6 +638,26 @@ var RunService_ServiceDesc = grpc.ServiceDesc{ MethodName: "RetryRun", Handler: _RunService_RetryRun_Handler, }, + { + MethodName: "CreateTask", + Handler: _RunService_CreateTask_Handler, + }, + { + MethodName: "UpdateTask", + Handler: _RunService_UpdateTask_Handler, + }, + { + MethodName: "UpdateTasksBulk", + Handler: _RunService_UpdateTasksBulk_Handler, + }, + { + MethodName: "GetTask", + Handler: _RunService_GetTask_Handler, + }, + { + MethodName: "ListTasks", + Handler: _RunService_ListTasks_Handler, + }, }, Streams: []grpc.StreamDesc{}, Metadata: "backend/api/v2beta1/run.proto", diff --git a/backend/api/v2beta1/go_http_client/artifact_client/artifact_client.go b/backend/api/v2beta1/go_http_client/artifact_client/artifact_client.go new file mode 100644 index 00000000000..ebf23bdcdec --- /dev/null +++ b/backend/api/v2beta1/go_http_client/artifact_client/artifact_client.go @@ -0,0 +1,112 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package artifact_client + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "github.com/go-openapi/runtime" + httptransport "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" + + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/artifact_client/artifact_service" +) + +// Default artifact HTTP client. 
+var Default = NewHTTPClient(nil) + +const ( + // DefaultHost is the default Host + // found in Meta (info) section of spec file + DefaultHost string = "localhost" + // DefaultBasePath is the default BasePath + // found in Meta (info) section of spec file + DefaultBasePath string = "/" +) + +// DefaultSchemes are the default schemes found in Meta (info) section of spec file +var DefaultSchemes = []string{"http", "https"} + +// NewHTTPClient creates a new artifact HTTP client. +func NewHTTPClient(formats strfmt.Registry) *Artifact { + return NewHTTPClientWithConfig(formats, nil) +} + +// NewHTTPClientWithConfig creates a new artifact HTTP client, +// using a customizable transport config. +func NewHTTPClientWithConfig(formats strfmt.Registry, cfg *TransportConfig) *Artifact { + // ensure nullable parameters have default + if cfg == nil { + cfg = DefaultTransportConfig() + } + + // create transport and client + transport := httptransport.New(cfg.Host, cfg.BasePath, cfg.Schemes) + return New(transport, formats) +} + +// New creates a new artifact client +func New(transport runtime.ClientTransport, formats strfmt.Registry) *Artifact { + // ensure nullable parameters have default + if formats == nil { + formats = strfmt.Default + } + + cli := new(Artifact) + cli.Transport = transport + cli.ArtifactService = artifact_service.New(transport, formats) + return cli +} + +// DefaultTransportConfig creates a TransportConfig with the +// default settings taken from the meta section of the spec file. +func DefaultTransportConfig() *TransportConfig { + return &TransportConfig{ + Host: DefaultHost, + BasePath: DefaultBasePath, + Schemes: DefaultSchemes, + } +} + +// TransportConfig contains the transport related info, +// found in the meta section of the spec file. +type TransportConfig struct { + Host string + BasePath string + Schemes []string +} + +// WithHost overrides the default host, +// provided by the meta section of the spec file. +func (cfg *TransportConfig) WithHost(host string) *TransportConfig { + cfg.Host = host + return cfg +} + +// WithBasePath overrides the default basePath, +// provided by the meta section of the spec file. +func (cfg *TransportConfig) WithBasePath(basePath string) *TransportConfig { + cfg.BasePath = basePath + return cfg +} + +// WithSchemes overrides the default schemes, +// provided by the meta section of the spec file. +func (cfg *TransportConfig) WithSchemes(schemes []string) *TransportConfig { + cfg.Schemes = schemes + return cfg +} + +// Artifact is a client for artifact +type Artifact struct { + ArtifactService artifact_service.ClientService + + Transport runtime.ClientTransport +} + +// SetTransport changes the transport on the client and all its subresources +func (c *Artifact) SetTransport(transport runtime.ClientTransport) { + c.Transport = transport + c.ArtifactService.SetTransport(transport) +} diff --git a/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/artifact_service_client.go b/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/artifact_service_client.go new file mode 100644 index 00000000000..fa027ef91e6 --- /dev/null +++ b/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/artifact_service_client.go @@ -0,0 +1,336 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package artifact_service + +// This file was generated by the swagger tool. 
+// Editing this file might prove futile when you re-run the swagger generate command
+
+import (
+	"github.com/go-openapi/runtime"
+	httptransport "github.com/go-openapi/runtime/client"
+	"github.com/go-openapi/strfmt"
+)
+
+// New creates a new artifact service API client.
+func New(transport runtime.ClientTransport, formats strfmt.Registry) ClientService {
+	return &Client{transport: transport, formats: formats}
+}
+
+// NewClientWithBasicAuth creates a new artifact service API client with basic auth credentials.
+// It takes the following parameters:
+// - host: http host (github.com).
+// - basePath: any base path for the API client ("/v1", "/v3").
+// - scheme: http scheme ("http", "https").
+// - user: user for basic authentication header.
+// - password: password for basic authentication header.
+func NewClientWithBasicAuth(host, basePath, scheme, user, password string) ClientService {
+	transport := httptransport.New(host, basePath, []string{scheme})
+	transport.DefaultAuthentication = httptransport.BasicAuth(user, password)
+	return &Client{transport: transport, formats: strfmt.Default}
+}
+
+// NewClientWithBearerToken creates a new artifact service API client with a bearer token for authentication.
+// It takes the following parameters:
+// - host: http host (github.com).
+// - basePath: any base path for the API client ("/v1", "/v3").
+// - scheme: http scheme ("http", "https").
+// - bearerToken: bearer token for Bearer authentication header.
+func NewClientWithBearerToken(host, basePath, scheme, bearerToken string) ClientService {
+	transport := httptransport.New(host, basePath, []string{scheme})
+	transport.DefaultAuthentication = httptransport.BearerToken(bearerToken)
+	return &Client{transport: transport, formats: strfmt.Default}
+}
+
+/*
+Client for artifact service API
+*/
+type Client struct {
+	transport runtime.ClientTransport
+	formats   strfmt.Registry
+}
+
+// ClientOption may be used to customize the behavior of Client methods.
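A quick sketch of wiring up the new artifact HTTP client against a non-default host, using the TransportConfig helpers defined in artifact_client.go above; the host value is illustrative, not part of this PR:

package main

import (
    "fmt"

    artifact_client "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/artifact_client"
)

func main() {
    // Start from the spec defaults and override host and schemes.
    cfg := artifact_client.DefaultTransportConfig().
        WithHost("ml-pipeline.kubeflow:8888"). // illustrative in-cluster address
        WithSchemes([]string{"http"})
    // nil formats falls back to strfmt.Default inside New.
    cli := artifact_client.NewHTTPClientWithConfig(nil, cfg)
    fmt.Printf("artifact service client ready: %T\n", cli.ArtifactService)
}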
+type ClientOption func(*runtime.ClientOperation) + +// ClientService is the interface for Client methods +type ClientService interface { + BatchCreateArtifactTasks(params *BatchCreateArtifactTasksParams, opts ...ClientOption) (*BatchCreateArtifactTasksOK, error) + + BatchCreateArtifacts(params *BatchCreateArtifactsParams, opts ...ClientOption) (*BatchCreateArtifactsOK, error) + + CreateArtifact(params *CreateArtifactParams, opts ...ClientOption) (*CreateArtifactOK, error) + + CreateArtifactTask(params *CreateArtifactTaskParams, opts ...ClientOption) (*CreateArtifactTaskOK, error) + + GetArtifact(params *GetArtifactParams, opts ...ClientOption) (*GetArtifactOK, error) + + ListArtifactTasks(params *ListArtifactTasksParams, opts ...ClientOption) (*ListArtifactTasksOK, error) + + ListArtifacts(params *ListArtifactsParams, opts ...ClientOption) (*ListArtifactsOK, error) + + SetTransport(transport runtime.ClientTransport) +} + +/* +BatchCreateArtifactTasks creates multiple artifact task relationships in bulk +*/ +func (a *Client) BatchCreateArtifactTasks(params *BatchCreateArtifactTasksParams, opts ...ClientOption) (*BatchCreateArtifactTasksOK, error) { + // TODO: Validate the params before sending + if params == nil { + params = NewBatchCreateArtifactTasksParams() + } + op := &runtime.ClientOperation{ + ID: "batch_create_artifact_tasks", + Method: "POST", + PathPattern: "/apis/v2beta1/artifact_tasks:batchCreate", + ProducesMediaTypes: []string{"application/json"}, + ConsumesMediaTypes: []string{"application/json"}, + Schemes: []string{"http", "https"}, + Params: params, + Reader: &BatchCreateArtifactTasksReader{formats: a.formats}, + Context: params.Context, + Client: params.HTTPClient, + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) + if err != nil { + return nil, err + } + success, ok := result.(*BatchCreateArtifactTasksOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*BatchCreateArtifactTasksDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) +} + +/* +BatchCreateArtifacts creates multiple artifacts in bulk +*/ +func (a *Client) BatchCreateArtifacts(params *BatchCreateArtifactsParams, opts ...ClientOption) (*BatchCreateArtifactsOK, error) { + // TODO: Validate the params before sending + if params == nil { + params = NewBatchCreateArtifactsParams() + } + op := &runtime.ClientOperation{ + ID: "batch_create_artifacts", + Method: "POST", + PathPattern: "/apis/v2beta1/artifacts:batchCreate", + ProducesMediaTypes: []string{"application/json"}, + ConsumesMediaTypes: []string{"application/json"}, + Schemes: []string{"http", "https"}, + Params: params, + Reader: &BatchCreateArtifactsReader{formats: a.formats}, + Context: params.Context, + Client: params.HTTPClient, + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) + if err != nil { + return nil, err + } + success, ok := result.(*BatchCreateArtifactsOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*BatchCreateArtifactsDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) +} + +/* +CreateArtifact creates a new artifact +*/ +func (a *Client) CreateArtifact(params *CreateArtifactParams, opts ...ClientOption) (*CreateArtifactOK, error) { + // 
TODO: Validate the params before sending + if params == nil { + params = NewCreateArtifactParams() + } + op := &runtime.ClientOperation{ + ID: "create_artifact", + Method: "POST", + PathPattern: "/apis/v2beta1/artifacts", + ProducesMediaTypes: []string{"application/json"}, + ConsumesMediaTypes: []string{"application/json"}, + Schemes: []string{"http", "https"}, + Params: params, + Reader: &CreateArtifactReader{formats: a.formats}, + Context: params.Context, + Client: params.HTTPClient, + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) + if err != nil { + return nil, err + } + success, ok := result.(*CreateArtifactOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*CreateArtifactDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) +} + +/* +CreateArtifactTask creates an artifact task relationship +*/ +func (a *Client) CreateArtifactTask(params *CreateArtifactTaskParams, opts ...ClientOption) (*CreateArtifactTaskOK, error) { + // TODO: Validate the params before sending + if params == nil { + params = NewCreateArtifactTaskParams() + } + op := &runtime.ClientOperation{ + ID: "create_artifact_task", + Method: "POST", + PathPattern: "/apis/v2beta1/artifact_tasks", + ProducesMediaTypes: []string{"application/json"}, + ConsumesMediaTypes: []string{"application/json"}, + Schemes: []string{"http", "https"}, + Params: params, + Reader: &CreateArtifactTaskReader{formats: a.formats}, + Context: params.Context, + Client: params.HTTPClient, + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) + if err != nil { + return nil, err + } + success, ok := result.(*CreateArtifactTaskOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*CreateArtifactTaskDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) +} + +/* +GetArtifact finds a specific artifact by ID +*/ +func (a *Client) GetArtifact(params *GetArtifactParams, opts ...ClientOption) (*GetArtifactOK, error) { + // TODO: Validate the params before sending + if params == nil { + params = NewGetArtifactParams() + } + op := &runtime.ClientOperation{ + ID: "get_artifact", + Method: "GET", + PathPattern: "/apis/v2beta1/artifacts/{artifact_id}", + ProducesMediaTypes: []string{"application/json"}, + ConsumesMediaTypes: []string{"application/json"}, + Schemes: []string{"http", "https"}, + Params: params, + Reader: &GetArtifactReader{formats: a.formats}, + Context: params.Context, + Client: params.HTTPClient, + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) + if err != nil { + return nil, err + } + success, ok := result.(*GetArtifactOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*GetArtifactDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) +} + +/* +ListArtifactTasks lists artifact task relationships +*/ +func (a *Client) ListArtifactTasks(params *ListArtifactTasksParams, opts ...ClientOption) (*ListArtifactTasksOK, error) { + // TODO: Validate the params before sending + if params == nil { + params = NewListArtifactTasksParams() + } + op := 
&runtime.ClientOperation{ + ID: "list_artifact_tasks", + Method: "GET", + PathPattern: "/apis/v2beta1/artifact_tasks", + ProducesMediaTypes: []string{"application/json"}, + ConsumesMediaTypes: []string{"application/json"}, + Schemes: []string{"http", "https"}, + Params: params, + Reader: &ListArtifactTasksReader{formats: a.formats}, + Context: params.Context, + Client: params.HTTPClient, + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) + if err != nil { + return nil, err + } + success, ok := result.(*ListArtifactTasksOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*ListArtifactTasksDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) +} + +/* +ListArtifacts finds all artifacts within the specified namespace +*/ +func (a *Client) ListArtifacts(params *ListArtifactsParams, opts ...ClientOption) (*ListArtifactsOK, error) { + // TODO: Validate the params before sending + if params == nil { + params = NewListArtifactsParams() + } + op := &runtime.ClientOperation{ + ID: "list_artifacts", + Method: "GET", + PathPattern: "/apis/v2beta1/artifacts", + ProducesMediaTypes: []string{"application/json"}, + ConsumesMediaTypes: []string{"application/json"}, + Schemes: []string{"http", "https"}, + Params: params, + Reader: &ListArtifactsReader{formats: a.formats}, + Context: params.Context, + Client: params.HTTPClient, + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) + if err != nil { + return nil, err + } + success, ok := result.(*ListArtifactsOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*ListArtifactsDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) +} + +// SetTransport changes the transport on the client +func (a *Client) SetTransport(transport runtime.ClientTransport) { + a.transport = transport +} diff --git a/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/batch_create_artifact_tasks_parameters.go b/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/batch_create_artifact_tasks_parameters.go new file mode 100644 index 00000000000..b73d408bd2c --- /dev/null +++ b/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/batch_create_artifact_tasks_parameters.go @@ -0,0 +1,150 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package artifact_service + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "net/http" + "time" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" + + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/artifact_model" +) + +// NewBatchCreateArtifactTasksParams creates a new BatchCreateArtifactTasksParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. 
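For orientation, a minimal sketch of how a caller wires up this generated artifact service client. The host address is a placeholder, and artifact_service.New is assumed to be the standard go-swagger constructor emitted alongside this ClientService:

package main

import (
	"log"

	httptransport "github.com/go-openapi/runtime/client"
	"github.com/go-openapi/strfmt"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/artifact_client/artifact_service"
)

func main() {
	// Placeholder endpoint; point this at a real KFP API server.
	transport := httptransport.New("localhost:8888", "/", []string{"http"})

	// New(transport, formats) is the usual go-swagger constructor; the
	// returned *Client satisfies the ClientService interface above.
	var svc artifact_service.ClientService = artifact_service.New(transport, strfmt.Default)

	resp, err := svc.ListArtifacts(artifact_service.NewListArtifactsParams())
	if err != nil {
		log.Fatalf("list artifacts: %v", err)
	}
	log.Printf("artifacts: %+v", resp.GetPayload())
}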
+func NewBatchCreateArtifactTasksParams() *BatchCreateArtifactTasksParams { + return &BatchCreateArtifactTasksParams{ + timeout: cr.DefaultTimeout, + } +} + +// NewBatchCreateArtifactTasksParamsWithTimeout creates a new BatchCreateArtifactTasksParams object +// with the ability to set a timeout on a request. +func NewBatchCreateArtifactTasksParamsWithTimeout(timeout time.Duration) *BatchCreateArtifactTasksParams { + return &BatchCreateArtifactTasksParams{ + timeout: timeout, + } +} + +// NewBatchCreateArtifactTasksParamsWithContext creates a new BatchCreateArtifactTasksParams object +// with the ability to set a context for a request. +func NewBatchCreateArtifactTasksParamsWithContext(ctx context.Context) *BatchCreateArtifactTasksParams { + return &BatchCreateArtifactTasksParams{ + Context: ctx, + } +} + +// NewBatchCreateArtifactTasksParamsWithHTTPClient creates a new BatchCreateArtifactTasksParams object +// with the ability to set a custom HTTPClient for a request. +func NewBatchCreateArtifactTasksParamsWithHTTPClient(client *http.Client) *BatchCreateArtifactTasksParams { + return &BatchCreateArtifactTasksParams{ + HTTPClient: client, + } +} + +/* +BatchCreateArtifactTasksParams contains all the parameters to send to the API endpoint + + for the batch create artifact tasks operation. + + Typically these are written to a http.Request. +*/ +type BatchCreateArtifactTasksParams struct { + + // Body. + Body *artifact_model.V2beta1CreateArtifactTasksBulkRequest + + timeout time.Duration + Context context.Context + HTTPClient *http.Client +} + +// WithDefaults hydrates default values in the batch create artifact tasks params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *BatchCreateArtifactTasksParams) WithDefaults() *BatchCreateArtifactTasksParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the batch create artifact tasks params (not the query body). +// +// All values with no default are reset to their zero value. 
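This file's With* setters each return the params pointer, so request construction chains fluently. A short sketch, with the bulk request value left to the caller:

import (
	"context"
	"time"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/artifact_client/artifact_service"
	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/artifact_model"
)

func newBatchParams(ctx context.Context, body *artifact_model.V2beta1CreateArtifactTasksBulkRequest) *artifact_service.BatchCreateArtifactTasksParams {
	// Each setter returns the params pointer, so the calls chain.
	return artifact_service.NewBatchCreateArtifactTasksParams().
		WithContext(ctx).              // cancellation and deadlines
		WithTimeout(30 * time.Second). // per-request timeout
		WithBody(body)
}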
+func (o *BatchCreateArtifactTasksParams) SetDefaults() { + // no default values defined for this parameter +} + +// WithTimeout adds the timeout to the batch create artifact tasks params +func (o *BatchCreateArtifactTasksParams) WithTimeout(timeout time.Duration) *BatchCreateArtifactTasksParams { + o.SetTimeout(timeout) + return o +} + +// SetTimeout adds the timeout to the batch create artifact tasks params +func (o *BatchCreateArtifactTasksParams) SetTimeout(timeout time.Duration) { + o.timeout = timeout +} + +// WithContext adds the context to the batch create artifact tasks params +func (o *BatchCreateArtifactTasksParams) WithContext(ctx context.Context) *BatchCreateArtifactTasksParams { + o.SetContext(ctx) + return o +} + +// SetContext adds the context to the batch create artifact tasks params +func (o *BatchCreateArtifactTasksParams) SetContext(ctx context.Context) { + o.Context = ctx +} + +// WithHTTPClient adds the HTTPClient to the batch create artifact tasks params +func (o *BatchCreateArtifactTasksParams) WithHTTPClient(client *http.Client) *BatchCreateArtifactTasksParams { + o.SetHTTPClient(client) + return o +} + +// SetHTTPClient adds the HTTPClient to the batch create artifact tasks params +func (o *BatchCreateArtifactTasksParams) SetHTTPClient(client *http.Client) { + o.HTTPClient = client +} + +// WithBody adds the body to the batch create artifact tasks params +func (o *BatchCreateArtifactTasksParams) WithBody(body *artifact_model.V2beta1CreateArtifactTasksBulkRequest) *BatchCreateArtifactTasksParams { + o.SetBody(body) + return o +} + +// SetBody adds the body to the batch create artifact tasks params +func (o *BatchCreateArtifactTasksParams) SetBody(body *artifact_model.V2beta1CreateArtifactTasksBulkRequest) { + o.Body = body +} + +// WriteToRequest writes these params to a swagger request +func (o *BatchCreateArtifactTasksParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { + + if err := r.SetTimeout(o.timeout); err != nil { + return err + } + var res []error + if o.Body != nil { + if err := r.SetBodyParam(o.Body); err != nil { + return err + } + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} diff --git a/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/batch_create_artifact_tasks_responses.go b/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/batch_create_artifact_tasks_responses.go new file mode 100644 index 00000000000..75a49702aee --- /dev/null +++ b/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/batch_create_artifact_tasks_responses.go @@ -0,0 +1,187 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package artifact_service + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" + + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/artifact_model" +) + +// BatchCreateArtifactTasksReader is a Reader for the BatchCreateArtifactTasks structure. +type BatchCreateArtifactTasksReader struct { + formats strfmt.Registry +} + +// ReadResponse reads a server response into the received o. 
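WriteToRequest above only serializes the timeout and body; transport concerns can instead ride on a caller-supplied *http.Client, which the generated operation passes through as ClientOperation.Client. A sketch:

import (
	"net/http"
	"time"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/artifact_client/artifact_service"
)

// A caller-owned client, e.g. with its own timeout or an instrumented
// round tripper, attached at params-construction time.
func newParamsWithClient() *artifact_service.BatchCreateArtifactTasksParams {
	httpClient := &http.Client{Timeout: 15 * time.Second}
	return artifact_service.NewBatchCreateArtifactTasksParamsWithHTTPClient(httpClient)
}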
+func (o *BatchCreateArtifactTasksReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + switch response.Code() { + case 200: + result := NewBatchCreateArtifactTasksOK() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return result, nil + default: + result := NewBatchCreateArtifactTasksDefault(response.Code()) + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + if response.Code()/100 == 2 { + return result, nil + } + return nil, result + } +} + +// NewBatchCreateArtifactTasksOK creates a BatchCreateArtifactTasksOK with default headers values +func NewBatchCreateArtifactTasksOK() *BatchCreateArtifactTasksOK { + return &BatchCreateArtifactTasksOK{} +} + +/* +BatchCreateArtifactTasksOK describes a response with status code 200, with default header values. + +A successful response. +*/ +type BatchCreateArtifactTasksOK struct { + Payload *artifact_model.V2beta1CreateArtifactTasksBulkResponse +} + +// IsSuccess returns true when this batch create artifact tasks o k response has a 2xx status code +func (o *BatchCreateArtifactTasksOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this batch create artifact tasks o k response has a 3xx status code +func (o *BatchCreateArtifactTasksOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this batch create artifact tasks o k response has a 4xx status code +func (o *BatchCreateArtifactTasksOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this batch create artifact tasks o k response has a 5xx status code +func (o *BatchCreateArtifactTasksOK) IsServerError() bool { + return false +} + +// IsCode returns true when this batch create artifact tasks o k response a status code equal to that given +func (o *BatchCreateArtifactTasksOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the batch create artifact tasks o k response +func (o *BatchCreateArtifactTasksOK) Code() int { + return 200 +} + +func (o *BatchCreateArtifactTasksOK) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/artifact_tasks:batchCreate][%d] batchCreateArtifactTasksOK %s", 200, payload) +} + +func (o *BatchCreateArtifactTasksOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/artifact_tasks:batchCreate][%d] batchCreateArtifactTasksOK %s", 200, payload) +} + +func (o *BatchCreateArtifactTasksOK) GetPayload() *artifact_model.V2beta1CreateArtifactTasksBulkResponse { + return o.Payload +} + +func (o *BatchCreateArtifactTasksOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + o.Payload = new(artifact_model.V2beta1CreateArtifactTasksBulkResponse) + + // response payload + if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} + +// NewBatchCreateArtifactTasksDefault creates a BatchCreateArtifactTasksDefault with default headers values +func NewBatchCreateArtifactTasksDefault(code int) *BatchCreateArtifactTasksDefault { + return &BatchCreateArtifactTasksDefault{ + _statusCode: code, + } +} + +/* +BatchCreateArtifactTasksDefault describes a response with status code -1, with default header values. + +An unexpected error response. 
+*/ +type BatchCreateArtifactTasksDefault struct { + _statusCode int + + Payload *artifact_model.GooglerpcStatus +} + +// IsSuccess returns true when this batch create artifact tasks default response has a 2xx status code +func (o *BatchCreateArtifactTasksDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this batch create artifact tasks default response has a 3xx status code +func (o *BatchCreateArtifactTasksDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this batch create artifact tasks default response has a 4xx status code +func (o *BatchCreateArtifactTasksDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this batch create artifact tasks default response has a 5xx status code +func (o *BatchCreateArtifactTasksDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this batch create artifact tasks default response a status code equal to that given +func (o *BatchCreateArtifactTasksDefault) IsCode(code int) bool { + return o._statusCode == code +} + +// Code gets the status code for the batch create artifact tasks default response +func (o *BatchCreateArtifactTasksDefault) Code() int { + return o._statusCode +} + +func (o *BatchCreateArtifactTasksDefault) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/artifact_tasks:batchCreate][%d] batch_create_artifact_tasks default %s", o._statusCode, payload) +} + +func (o *BatchCreateArtifactTasksDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/artifact_tasks:batchCreate][%d] batch_create_artifact_tasks default %s", o._statusCode, payload) +} + +func (o *BatchCreateArtifactTasksDefault) GetPayload() *artifact_model.GooglerpcStatus { + return o.Payload +} + +func (o *BatchCreateArtifactTasksDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + o.Payload = new(artifact_model.GooglerpcStatus) + + // response payload + if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} diff --git a/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/batch_create_artifacts_parameters.go b/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/batch_create_artifacts_parameters.go new file mode 100644 index 00000000000..42eeee57682 --- /dev/null +++ b/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/batch_create_artifacts_parameters.go @@ -0,0 +1,150 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package artifact_service + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "net/http" + "time" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" + + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/artifact_model" +) + +// NewBatchCreateArtifactsParams creates a new BatchCreateArtifactsParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. 
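Since BatchCreateArtifactTasksDefault implements error, callers can unwrap it from the error returned by the client method and inspect the gRPC-style status payload. A sketch:

import (
	"errors"
	"log"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/artifact_client/artifact_service"
)

func logBatchError(err error) {
	// Non-200 responses surface as *BatchCreateArtifactTasksDefault.
	var apiErr *artifact_service.BatchCreateArtifactTasksDefault
	if errors.As(err, &apiErr) {
		log.Printf("server returned %d: %+v", apiErr.Code(), apiErr.GetPayload())
		return
	}
	log.Printf("transport-level failure: %v", err)
}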
+func NewBatchCreateArtifactsParams() *BatchCreateArtifactsParams { + return &BatchCreateArtifactsParams{ + timeout: cr.DefaultTimeout, + } +} + +// NewBatchCreateArtifactsParamsWithTimeout creates a new BatchCreateArtifactsParams object +// with the ability to set a timeout on a request. +func NewBatchCreateArtifactsParamsWithTimeout(timeout time.Duration) *BatchCreateArtifactsParams { + return &BatchCreateArtifactsParams{ + timeout: timeout, + } +} + +// NewBatchCreateArtifactsParamsWithContext creates a new BatchCreateArtifactsParams object +// with the ability to set a context for a request. +func NewBatchCreateArtifactsParamsWithContext(ctx context.Context) *BatchCreateArtifactsParams { + return &BatchCreateArtifactsParams{ + Context: ctx, + } +} + +// NewBatchCreateArtifactsParamsWithHTTPClient creates a new BatchCreateArtifactsParams object +// with the ability to set a custom HTTPClient for a request. +func NewBatchCreateArtifactsParamsWithHTTPClient(client *http.Client) *BatchCreateArtifactsParams { + return &BatchCreateArtifactsParams{ + HTTPClient: client, + } +} + +/* +BatchCreateArtifactsParams contains all the parameters to send to the API endpoint + + for the batch create artifacts operation. + + Typically these are written to a http.Request. +*/ +type BatchCreateArtifactsParams struct { + + // Body. + Body *artifact_model.V2beta1CreateArtifactsBulkRequest + + timeout time.Duration + Context context.Context + HTTPClient *http.Client +} + +// WithDefaults hydrates default values in the batch create artifacts params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *BatchCreateArtifactsParams) WithDefaults() *BatchCreateArtifactsParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the batch create artifacts params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *BatchCreateArtifactsParams) SetDefaults() { + // no default values defined for this parameter +} + +// WithTimeout adds the timeout to the batch create artifacts params +func (o *BatchCreateArtifactsParams) WithTimeout(timeout time.Duration) *BatchCreateArtifactsParams { + o.SetTimeout(timeout) + return o +} + +// SetTimeout adds the timeout to the batch create artifacts params +func (o *BatchCreateArtifactsParams) SetTimeout(timeout time.Duration) { + o.timeout = timeout +} + +// WithContext adds the context to the batch create artifacts params +func (o *BatchCreateArtifactsParams) WithContext(ctx context.Context) *BatchCreateArtifactsParams { + o.SetContext(ctx) + return o +} + +// SetContext adds the context to the batch create artifacts params +func (o *BatchCreateArtifactsParams) SetContext(ctx context.Context) { + o.Context = ctx +} + +// WithHTTPClient adds the HTTPClient to the batch create artifacts params +func (o *BatchCreateArtifactsParams) WithHTTPClient(client *http.Client) *BatchCreateArtifactsParams { + o.SetHTTPClient(client) + return o +} + +// SetHTTPClient adds the HTTPClient to the batch create artifacts params +func (o *BatchCreateArtifactsParams) SetHTTPClient(client *http.Client) { + o.HTTPClient = client +} + +// WithBody adds the body to the batch create artifacts params +func (o *BatchCreateArtifactsParams) WithBody(body *artifact_model.V2beta1CreateArtifactsBulkRequest) *BatchCreateArtifactsParams { + o.SetBody(body) + return o +} + +// SetBody adds the body to the batch create artifacts params +func (o *BatchCreateArtifactsParams) SetBody(body *artifact_model.V2beta1CreateArtifactsBulkRequest) { + o.Body = body +} + +// WriteToRequest writes these params to a swagger request +func (o *BatchCreateArtifactsParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { + + if err := r.SetTimeout(o.timeout); err != nil { + return err + } + var res []error + if o.Body != nil { + if err := r.SetBodyParam(o.Body); err != nil { + return err + } + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} diff --git a/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/batch_create_artifacts_responses.go b/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/batch_create_artifacts_responses.go new file mode 100644 index 00000000000..8ebd1ee020e --- /dev/null +++ b/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/batch_create_artifacts_responses.go @@ -0,0 +1,187 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package artifact_service + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" + + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/artifact_model" +) + +// BatchCreateArtifactsReader is a Reader for the BatchCreateArtifacts structure. +type BatchCreateArtifactsReader struct { + formats strfmt.Registry +} + +// ReadResponse reads a server response into the received o. 
+func (o *BatchCreateArtifactsReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + switch response.Code() { + case 200: + result := NewBatchCreateArtifactsOK() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return result, nil + default: + result := NewBatchCreateArtifactsDefault(response.Code()) + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + if response.Code()/100 == 2 { + return result, nil + } + return nil, result + } +} + +// NewBatchCreateArtifactsOK creates a BatchCreateArtifactsOK with default headers values +func NewBatchCreateArtifactsOK() *BatchCreateArtifactsOK { + return &BatchCreateArtifactsOK{} +} + +/* +BatchCreateArtifactsOK describes a response with status code 200, with default header values. + +A successful response. +*/ +type BatchCreateArtifactsOK struct { + Payload *artifact_model.V2beta1CreateArtifactsBulkResponse +} + +// IsSuccess returns true when this batch create artifacts o k response has a 2xx status code +func (o *BatchCreateArtifactsOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this batch create artifacts o k response has a 3xx status code +func (o *BatchCreateArtifactsOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this batch create artifacts o k response has a 4xx status code +func (o *BatchCreateArtifactsOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this batch create artifacts o k response has a 5xx status code +func (o *BatchCreateArtifactsOK) IsServerError() bool { + return false +} + +// IsCode returns true when this batch create artifacts o k response a status code equal to that given +func (o *BatchCreateArtifactsOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the batch create artifacts o k response +func (o *BatchCreateArtifactsOK) Code() int { + return 200 +} + +func (o *BatchCreateArtifactsOK) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/artifacts:batchCreate][%d] batchCreateArtifactsOK %s", 200, payload) +} + +func (o *BatchCreateArtifactsOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/artifacts:batchCreate][%d] batchCreateArtifactsOK %s", 200, payload) +} + +func (o *BatchCreateArtifactsOK) GetPayload() *artifact_model.V2beta1CreateArtifactsBulkResponse { + return o.Payload +} + +func (o *BatchCreateArtifactsOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + o.Payload = new(artifact_model.V2beta1CreateArtifactsBulkResponse) + + // response payload + if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} + +// NewBatchCreateArtifactsDefault creates a BatchCreateArtifactsDefault with default headers values +func NewBatchCreateArtifactsDefault(code int) *BatchCreateArtifactsDefault { + return &BatchCreateArtifactsDefault{ + _statusCode: code, + } +} + +/* +BatchCreateArtifactsDefault describes a response with status code -1, with default header values. + +An unexpected error response. 
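Note the default branch above: a 2xx status not declared in the spec is returned as a success, which the client method then rejects with runtime.NewAPIError. That case can be detected as in this sketch:

import (
	"errors"
	"log"

	"github.com/go-openapi/runtime"
)

func logUnexpectedSuccess(err error) {
	// Submit wraps undeclared 2xx responses in *runtime.APIError.
	var apiErr *runtime.APIError
	if errors.As(err, &apiErr) {
		log.Printf("operation %s returned undeclared code %d", apiErr.OperationName, apiErr.Code)
	}
}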
+*/ +type BatchCreateArtifactsDefault struct { + _statusCode int + + Payload *artifact_model.GooglerpcStatus +} + +// IsSuccess returns true when this batch create artifacts default response has a 2xx status code +func (o *BatchCreateArtifactsDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this batch create artifacts default response has a 3xx status code +func (o *BatchCreateArtifactsDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this batch create artifacts default response has a 4xx status code +func (o *BatchCreateArtifactsDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this batch create artifacts default response has a 5xx status code +func (o *BatchCreateArtifactsDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this batch create artifacts default response a status code equal to that given +func (o *BatchCreateArtifactsDefault) IsCode(code int) bool { + return o._statusCode == code +} + +// Code gets the status code for the batch create artifacts default response +func (o *BatchCreateArtifactsDefault) Code() int { + return o._statusCode +} + +func (o *BatchCreateArtifactsDefault) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/artifacts:batchCreate][%d] batch_create_artifacts default %s", o._statusCode, payload) +} + +func (o *BatchCreateArtifactsDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/artifacts:batchCreate][%d] batch_create_artifacts default %s", o._statusCode, payload) +} + +func (o *BatchCreateArtifactsDefault) GetPayload() *artifact_model.GooglerpcStatus { + return o.Payload +} + +func (o *BatchCreateArtifactsDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + o.Payload = new(artifact_model.GooglerpcStatus) + + // response payload + if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} diff --git a/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/create_artifact_parameters.go b/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/create_artifact_parameters.go new file mode 100644 index 00000000000..2864622c171 --- /dev/null +++ b/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/create_artifact_parameters.go @@ -0,0 +1,150 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package artifact_service + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "net/http" + "time" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" + + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/artifact_model" +) + +// NewCreateArtifactParams creates a new CreateArtifactParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. 
+func NewCreateArtifactParams() *CreateArtifactParams { + return &CreateArtifactParams{ + timeout: cr.DefaultTimeout, + } +} + +// NewCreateArtifactParamsWithTimeout creates a new CreateArtifactParams object +// with the ability to set a timeout on a request. +func NewCreateArtifactParamsWithTimeout(timeout time.Duration) *CreateArtifactParams { + return &CreateArtifactParams{ + timeout: timeout, + } +} + +// NewCreateArtifactParamsWithContext creates a new CreateArtifactParams object +// with the ability to set a context for a request. +func NewCreateArtifactParamsWithContext(ctx context.Context) *CreateArtifactParams { + return &CreateArtifactParams{ + Context: ctx, + } +} + +// NewCreateArtifactParamsWithHTTPClient creates a new CreateArtifactParams object +// with the ability to set a custom HTTPClient for a request. +func NewCreateArtifactParamsWithHTTPClient(client *http.Client) *CreateArtifactParams { + return &CreateArtifactParams{ + HTTPClient: client, + } +} + +/* +CreateArtifactParams contains all the parameters to send to the API endpoint + + for the create artifact operation. + + Typically these are written to a http.Request. +*/ +type CreateArtifactParams struct { + + // Body. + Body *artifact_model.V2beta1CreateArtifactRequest + + timeout time.Duration + Context context.Context + HTTPClient *http.Client +} + +// WithDefaults hydrates default values in the create artifact params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *CreateArtifactParams) WithDefaults() *CreateArtifactParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the create artifact params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *CreateArtifactParams) SetDefaults() { + // no default values defined for this parameter +} + +// WithTimeout adds the timeout to the create artifact params +func (o *CreateArtifactParams) WithTimeout(timeout time.Duration) *CreateArtifactParams { + o.SetTimeout(timeout) + return o +} + +// SetTimeout adds the timeout to the create artifact params +func (o *CreateArtifactParams) SetTimeout(timeout time.Duration) { + o.timeout = timeout +} + +// WithContext adds the context to the create artifact params +func (o *CreateArtifactParams) WithContext(ctx context.Context) *CreateArtifactParams { + o.SetContext(ctx) + return o +} + +// SetContext adds the context to the create artifact params +func (o *CreateArtifactParams) SetContext(ctx context.Context) { + o.Context = ctx +} + +// WithHTTPClient adds the HTTPClient to the create artifact params +func (o *CreateArtifactParams) WithHTTPClient(client *http.Client) *CreateArtifactParams { + o.SetHTTPClient(client) + return o +} + +// SetHTTPClient adds the HTTPClient to the create artifact params +func (o *CreateArtifactParams) SetHTTPClient(client *http.Client) { + o.HTTPClient = client +} + +// WithBody adds the body to the create artifact params +func (o *CreateArtifactParams) WithBody(body *artifact_model.V2beta1CreateArtifactRequest) *CreateArtifactParams { + o.SetBody(body) + return o +} + +// SetBody adds the body to the create artifact params +func (o *CreateArtifactParams) SetBody(body *artifact_model.V2beta1CreateArtifactRequest) { + o.Body = body +} + +// WriteToRequest writes these params to a swagger request +func (o *CreateArtifactParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { + + if err := r.SetTimeout(o.timeout); err != nil { + return err + } + var res []error 
+ if o.Body != nil { + if err := r.SetBodyParam(o.Body); err != nil { + return err + } + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} diff --git a/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/create_artifact_responses.go b/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/create_artifact_responses.go new file mode 100644 index 00000000000..29cb49dc6de --- /dev/null +++ b/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/create_artifact_responses.go @@ -0,0 +1,187 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package artifact_service + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" + + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/artifact_model" +) + +// CreateArtifactReader is a Reader for the CreateArtifact structure. +type CreateArtifactReader struct { + formats strfmt.Registry +} + +// ReadResponse reads a server response into the received o. +func (o *CreateArtifactReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + switch response.Code() { + case 200: + result := NewCreateArtifactOK() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return result, nil + default: + result := NewCreateArtifactDefault(response.Code()) + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + if response.Code()/100 == 2 { + return result, nil + } + return nil, result + } +} + +// NewCreateArtifactOK creates a CreateArtifactOK with default headers values +func NewCreateArtifactOK() *CreateArtifactOK { + return &CreateArtifactOK{} +} + +/* +CreateArtifactOK describes a response with status code 200, with default header values. + +A successful response. 
+*/ +type CreateArtifactOK struct { + Payload *artifact_model.V2beta1Artifact +} + +// IsSuccess returns true when this create artifact o k response has a 2xx status code +func (o *CreateArtifactOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this create artifact o k response has a 3xx status code +func (o *CreateArtifactOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this create artifact o k response has a 4xx status code +func (o *CreateArtifactOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this create artifact o k response has a 5xx status code +func (o *CreateArtifactOK) IsServerError() bool { + return false +} + +// IsCode returns true when this create artifact o k response a status code equal to that given +func (o *CreateArtifactOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the create artifact o k response +func (o *CreateArtifactOK) Code() int { + return 200 +} + +func (o *CreateArtifactOK) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/artifacts][%d] createArtifactOK %s", 200, payload) +} + +func (o *CreateArtifactOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/artifacts][%d] createArtifactOK %s", 200, payload) +} + +func (o *CreateArtifactOK) GetPayload() *artifact_model.V2beta1Artifact { + return o.Payload +} + +func (o *CreateArtifactOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + o.Payload = new(artifact_model.V2beta1Artifact) + + // response payload + if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} + +// NewCreateArtifactDefault creates a CreateArtifactDefault with default headers values +func NewCreateArtifactDefault(code int) *CreateArtifactDefault { + return &CreateArtifactDefault{ + _statusCode: code, + } +} + +/* +CreateArtifactDefault describes a response with status code -1, with default header values. + +An unexpected error response. 
+*/ +type CreateArtifactDefault struct { + _statusCode int + + Payload *artifact_model.GooglerpcStatus +} + +// IsSuccess returns true when this create artifact default response has a 2xx status code +func (o *CreateArtifactDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this create artifact default response has a 3xx status code +func (o *CreateArtifactDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this create artifact default response has a 4xx status code +func (o *CreateArtifactDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this create artifact default response has a 5xx status code +func (o *CreateArtifactDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this create artifact default response a status code equal to that given +func (o *CreateArtifactDefault) IsCode(code int) bool { + return o._statusCode == code +} + +// Code gets the status code for the create artifact default response +func (o *CreateArtifactDefault) Code() int { + return o._statusCode +} + +func (o *CreateArtifactDefault) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/artifacts][%d] create_artifact default %s", o._statusCode, payload) +} + +func (o *CreateArtifactDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/artifacts][%d] create_artifact default %s", o._statusCode, payload) +} + +func (o *CreateArtifactDefault) GetPayload() *artifact_model.GooglerpcStatus { + return o.Payload +} + +func (o *CreateArtifactDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + o.Payload = new(artifact_model.GooglerpcStatus) + + // response payload + if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} diff --git a/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/create_artifact_task_parameters.go b/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/create_artifact_task_parameters.go new file mode 100644 index 00000000000..8c658550760 --- /dev/null +++ b/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/create_artifact_task_parameters.go @@ -0,0 +1,150 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package artifact_service + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "net/http" + "time" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" + + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/artifact_model" +) + +// NewCreateArtifactTaskParams creates a new CreateArtifactTaskParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. +func NewCreateArtifactTaskParams() *CreateArtifactTaskParams { + return &CreateArtifactTaskParams{ + timeout: cr.DefaultTimeout, + } +} + +// NewCreateArtifactTaskParamsWithTimeout creates a new CreateArtifactTaskParams object +// with the ability to set a timeout on a request. 
+func NewCreateArtifactTaskParamsWithTimeout(timeout time.Duration) *CreateArtifactTaskParams { + return &CreateArtifactTaskParams{ + timeout: timeout, + } +} + +// NewCreateArtifactTaskParamsWithContext creates a new CreateArtifactTaskParams object +// with the ability to set a context for a request. +func NewCreateArtifactTaskParamsWithContext(ctx context.Context) *CreateArtifactTaskParams { + return &CreateArtifactTaskParams{ + Context: ctx, + } +} + +// NewCreateArtifactTaskParamsWithHTTPClient creates a new CreateArtifactTaskParams object +// with the ability to set a custom HTTPClient for a request. +func NewCreateArtifactTaskParamsWithHTTPClient(client *http.Client) *CreateArtifactTaskParams { + return &CreateArtifactTaskParams{ + HTTPClient: client, + } +} + +/* +CreateArtifactTaskParams contains all the parameters to send to the API endpoint + + for the create artifact task operation. + + Typically these are written to a http.Request. +*/ +type CreateArtifactTaskParams struct { + + // Body. + Body *artifact_model.V2beta1CreateArtifactTaskRequest + + timeout time.Duration + Context context.Context + HTTPClient *http.Client +} + +// WithDefaults hydrates default values in the create artifact task params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *CreateArtifactTaskParams) WithDefaults() *CreateArtifactTaskParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the create artifact task params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *CreateArtifactTaskParams) SetDefaults() { + // no default values defined for this parameter +} + +// WithTimeout adds the timeout to the create artifact task params +func (o *CreateArtifactTaskParams) WithTimeout(timeout time.Duration) *CreateArtifactTaskParams { + o.SetTimeout(timeout) + return o +} + +// SetTimeout adds the timeout to the create artifact task params +func (o *CreateArtifactTaskParams) SetTimeout(timeout time.Duration) { + o.timeout = timeout +} + +// WithContext adds the context to the create artifact task params +func (o *CreateArtifactTaskParams) WithContext(ctx context.Context) *CreateArtifactTaskParams { + o.SetContext(ctx) + return o +} + +// SetContext adds the context to the create artifact task params +func (o *CreateArtifactTaskParams) SetContext(ctx context.Context) { + o.Context = ctx +} + +// WithHTTPClient adds the HTTPClient to the create artifact task params +func (o *CreateArtifactTaskParams) WithHTTPClient(client *http.Client) *CreateArtifactTaskParams { + o.SetHTTPClient(client) + return o +} + +// SetHTTPClient adds the HTTPClient to the create artifact task params +func (o *CreateArtifactTaskParams) SetHTTPClient(client *http.Client) { + o.HTTPClient = client +} + +// WithBody adds the body to the create artifact task params +func (o *CreateArtifactTaskParams) WithBody(body *artifact_model.V2beta1CreateArtifactTaskRequest) *CreateArtifactTaskParams { + o.SetBody(body) + return o +} + +// SetBody adds the body to the create artifact task params +func (o *CreateArtifactTaskParams) SetBody(body *artifact_model.V2beta1CreateArtifactTaskRequest) { + o.Body = body +} + +// WriteToRequest writes these params to a swagger request +func (o *CreateArtifactTaskParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { + + if err := r.SetTimeout(o.timeout); err != nil { + return err + } + var res []error + if o.Body != nil { + if err := r.SetBodyParam(o.Body); err != nil { 
+ return err + } + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} diff --git a/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/create_artifact_task_responses.go b/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/create_artifact_task_responses.go new file mode 100644 index 00000000000..ac385d61d57 --- /dev/null +++ b/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/create_artifact_task_responses.go @@ -0,0 +1,187 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package artifact_service + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" + + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/artifact_model" +) + +// CreateArtifactTaskReader is a Reader for the CreateArtifactTask structure. +type CreateArtifactTaskReader struct { + formats strfmt.Registry +} + +// ReadResponse reads a server response into the received o. +func (o *CreateArtifactTaskReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + switch response.Code() { + case 200: + result := NewCreateArtifactTaskOK() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return result, nil + default: + result := NewCreateArtifactTaskDefault(response.Code()) + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + if response.Code()/100 == 2 { + return result, nil + } + return nil, result + } +} + +// NewCreateArtifactTaskOK creates a CreateArtifactTaskOK with default headers values +func NewCreateArtifactTaskOK() *CreateArtifactTaskOK { + return &CreateArtifactTaskOK{} +} + +/* +CreateArtifactTaskOK describes a response with status code 200, with default header values. + +A successful response. 
+*/ +type CreateArtifactTaskOK struct { + Payload *artifact_model.V2beta1ArtifactTask +} + +// IsSuccess returns true when this create artifact task o k response has a 2xx status code +func (o *CreateArtifactTaskOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this create artifact task o k response has a 3xx status code +func (o *CreateArtifactTaskOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this create artifact task o k response has a 4xx status code +func (o *CreateArtifactTaskOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this create artifact task o k response has a 5xx status code +func (o *CreateArtifactTaskOK) IsServerError() bool { + return false +} + +// IsCode returns true when this create artifact task o k response a status code equal to that given +func (o *CreateArtifactTaskOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the create artifact task o k response +func (o *CreateArtifactTaskOK) Code() int { + return 200 +} + +func (o *CreateArtifactTaskOK) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/artifact_tasks][%d] createArtifactTaskOK %s", 200, payload) +} + +func (o *CreateArtifactTaskOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/artifact_tasks][%d] createArtifactTaskOK %s", 200, payload) +} + +func (o *CreateArtifactTaskOK) GetPayload() *artifact_model.V2beta1ArtifactTask { + return o.Payload +} + +func (o *CreateArtifactTaskOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + o.Payload = new(artifact_model.V2beta1ArtifactTask) + + // response payload + if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} + +// NewCreateArtifactTaskDefault creates a CreateArtifactTaskDefault with default headers values +func NewCreateArtifactTaskDefault(code int) *CreateArtifactTaskDefault { + return &CreateArtifactTaskDefault{ + _statusCode: code, + } +} + +/* +CreateArtifactTaskDefault describes a response with status code -1, with default header values. + +An unexpected error response. 
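Putting the params and response types together, a sketch of one create call; the request fields are elided here since they live in the artifact_model package:

import (
	"context"
	"log"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/artifact_client/artifact_service"
	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/artifact_model"
)

func createLink(ctx context.Context, svc artifact_service.ClientService) {
	// Populate V2beta1CreateArtifactTaskRequest per the artifact_model
	// definitions; left empty in this sketch.
	req := &artifact_model.V2beta1CreateArtifactTaskRequest{}

	ok, err := svc.CreateArtifactTask(
		artifact_service.NewCreateArtifactTaskParams().WithContext(ctx).WithBody(req))
	if err != nil {
		log.Fatalf("create artifact task: %v", err)
	}
	log.Printf("created artifact task: %+v", ok.GetPayload())
}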
+*/ +type CreateArtifactTaskDefault struct { + _statusCode int + + Payload *artifact_model.GooglerpcStatus +} + +// IsSuccess returns true when this create artifact task default response has a 2xx status code +func (o *CreateArtifactTaskDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this create artifact task default response has a 3xx status code +func (o *CreateArtifactTaskDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this create artifact task default response has a 4xx status code +func (o *CreateArtifactTaskDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this create artifact task default response has a 5xx status code +func (o *CreateArtifactTaskDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this create artifact task default response a status code equal to that given +func (o *CreateArtifactTaskDefault) IsCode(code int) bool { + return o._statusCode == code +} + +// Code gets the status code for the create artifact task default response +func (o *CreateArtifactTaskDefault) Code() int { + return o._statusCode +} + +func (o *CreateArtifactTaskDefault) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/artifact_tasks][%d] create_artifact_task default %s", o._statusCode, payload) +} + +func (o *CreateArtifactTaskDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/artifact_tasks][%d] create_artifact_task default %s", o._statusCode, payload) +} + +func (o *CreateArtifactTaskDefault) GetPayload() *artifact_model.GooglerpcStatus { + return o.Payload +} + +func (o *CreateArtifactTaskDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + o.Payload = new(artifact_model.GooglerpcStatus) + + // response payload + if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} diff --git a/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/get_artifact_parameters.go b/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/get_artifact_parameters.go new file mode 100644 index 00000000000..593026215eb --- /dev/null +++ b/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/get_artifact_parameters.go @@ -0,0 +1,151 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package artifact_service + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "net/http" + "time" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" +) + +// NewGetArtifactParams creates a new GetArtifactParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. +func NewGetArtifactParams() *GetArtifactParams { + return &GetArtifactParams{ + timeout: cr.DefaultTimeout, + } +} + +// NewGetArtifactParamsWithTimeout creates a new GetArtifactParams object +// with the ability to set a timeout on a request. 
+func NewGetArtifactParamsWithTimeout(timeout time.Duration) *GetArtifactParams { + return &GetArtifactParams{ + timeout: timeout, + } +} + +// NewGetArtifactParamsWithContext creates a new GetArtifactParams object +// with the ability to set a context for a request. +func NewGetArtifactParamsWithContext(ctx context.Context) *GetArtifactParams { + return &GetArtifactParams{ + Context: ctx, + } +} + +// NewGetArtifactParamsWithHTTPClient creates a new GetArtifactParams object +// with the ability to set a custom HTTPClient for a request. +func NewGetArtifactParamsWithHTTPClient(client *http.Client) *GetArtifactParams { + return &GetArtifactParams{ + HTTPClient: client, + } +} + +/* +GetArtifactParams contains all the parameters to send to the API endpoint + + for the get artifact operation. + + Typically these are written to a http.Request. +*/ +type GetArtifactParams struct { + + /* ArtifactID. + + Required. The ID of the artifact to be retrieved. + */ + ArtifactID string + + timeout time.Duration + Context context.Context + HTTPClient *http.Client +} + +// WithDefaults hydrates default values in the get artifact params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *GetArtifactParams) WithDefaults() *GetArtifactParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the get artifact params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *GetArtifactParams) SetDefaults() { + // no default values defined for this parameter +} + +// WithTimeout adds the timeout to the get artifact params +func (o *GetArtifactParams) WithTimeout(timeout time.Duration) *GetArtifactParams { + o.SetTimeout(timeout) + return o +} + +// SetTimeout adds the timeout to the get artifact params +func (o *GetArtifactParams) SetTimeout(timeout time.Duration) { + o.timeout = timeout +} + +// WithContext adds the context to the get artifact params +func (o *GetArtifactParams) WithContext(ctx context.Context) *GetArtifactParams { + o.SetContext(ctx) + return o +} + +// SetContext adds the context to the get artifact params +func (o *GetArtifactParams) SetContext(ctx context.Context) { + o.Context = ctx +} + +// WithHTTPClient adds the HTTPClient to the get artifact params +func (o *GetArtifactParams) WithHTTPClient(client *http.Client) *GetArtifactParams { + o.SetHTTPClient(client) + return o +} + +// SetHTTPClient adds the HTTPClient to the get artifact params +func (o *GetArtifactParams) SetHTTPClient(client *http.Client) { + o.HTTPClient = client +} + +// WithArtifactID adds the artifactID to the get artifact params +func (o *GetArtifactParams) WithArtifactID(artifactID string) *GetArtifactParams { + o.SetArtifactID(artifactID) + return o +} + +// SetArtifactID adds the artifactId to the get artifact params +func (o *GetArtifactParams) SetArtifactID(artifactID string) { + o.ArtifactID = artifactID +} + +// WriteToRequest writes these params to a swagger request +func (o *GetArtifactParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { + + if err := r.SetTimeout(o.timeout); err != nil { + return err + } + var res []error + + // path param artifact_id + if err := r.SetPathParam("artifact_id", o.ArtifactID); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} diff --git a/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/get_artifact_responses.go b/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/get_artifact_responses.go new file mode 100644 index 00000000000..d92ee56f96d --- /dev/null +++ b/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/get_artifact_responses.go @@ -0,0 +1,187 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package artifact_service + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" + + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/artifact_model" +) + +// GetArtifactReader is a Reader for the GetArtifact structure. +type GetArtifactReader struct { + formats strfmt.Registry +} + +// ReadResponse reads a server response into the received o. +func (o *GetArtifactReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + switch response.Code() { + case 200: + result := NewGetArtifactOK() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return result, nil + default: + result := NewGetArtifactDefault(response.Code()) + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + if response.Code()/100 == 2 { + return result, nil + } + return nil, result + } +} + +// NewGetArtifactOK creates a GetArtifactOK with default headers values +func NewGetArtifactOK() *GetArtifactOK { + return &GetArtifactOK{} +} + +/* +GetArtifactOK describes a response with status code 200, with default header values. + +A successful response. 
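GetArtifactParams carries its ID as a path parameter, which WriteToRequest interpolates into /apis/v2beta1/artifacts/{artifact_id}. A sketch with a placeholder ID:

import (
	"log"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/artifact_client/artifact_service"
)

func fetchArtifact(svc artifact_service.ClientService) {
	params := artifact_service.NewGetArtifactParams().
		WithArtifactID("artifact-id-placeholder") // substitute a real ID

	ok, err := svc.GetArtifact(params)
	if err != nil {
		log.Fatalf("get artifact: %v", err)
	}
	log.Printf("artifact: %+v", ok.GetPayload())
}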
+*/ +type GetArtifactOK struct { + Payload *artifact_model.V2beta1Artifact +} + +// IsSuccess returns true when this get artifact o k response has a 2xx status code +func (o *GetArtifactOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this get artifact o k response has a 3xx status code +func (o *GetArtifactOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this get artifact o k response has a 4xx status code +func (o *GetArtifactOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this get artifact o k response has a 5xx status code +func (o *GetArtifactOK) IsServerError() bool { + return false +} + +// IsCode returns true when this get artifact o k response a status code equal to that given +func (o *GetArtifactOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the get artifact o k response +func (o *GetArtifactOK) Code() int { + return 200 +} + +func (o *GetArtifactOK) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/artifacts/{artifact_id}][%d] getArtifactOK %s", 200, payload) +} + +func (o *GetArtifactOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/artifacts/{artifact_id}][%d] getArtifactOK %s", 200, payload) +} + +func (o *GetArtifactOK) GetPayload() *artifact_model.V2beta1Artifact { + return o.Payload +} + +func (o *GetArtifactOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + o.Payload = new(artifact_model.V2beta1Artifact) + + // response payload + if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} + +// NewGetArtifactDefault creates a GetArtifactDefault with default headers values +func NewGetArtifactDefault(code int) *GetArtifactDefault { + return &GetArtifactDefault{ + _statusCode: code, + } +} + +/* +GetArtifactDefault describes a response with status code -1, with default header values. + +An unexpected error response. 
+*/ +type GetArtifactDefault struct { + _statusCode int + + Payload *artifact_model.GooglerpcStatus +} + +// IsSuccess returns true when this get artifact default response has a 2xx status code +func (o *GetArtifactDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this get artifact default response has a 3xx status code +func (o *GetArtifactDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this get artifact default response has a 4xx status code +func (o *GetArtifactDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this get artifact default response has a 5xx status code +func (o *GetArtifactDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this get artifact default response a status code equal to that given +func (o *GetArtifactDefault) IsCode(code int) bool { + return o._statusCode == code +} + +// Code gets the status code for the get artifact default response +func (o *GetArtifactDefault) Code() int { + return o._statusCode +} + +func (o *GetArtifactDefault) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/artifacts/{artifact_id}][%d] get_artifact default %s", o._statusCode, payload) +} + +func (o *GetArtifactDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/artifacts/{artifact_id}][%d] get_artifact default %s", o._statusCode, payload) +} + +func (o *GetArtifactDefault) GetPayload() *artifact_model.GooglerpcStatus { + return o.Payload +} + +func (o *GetArtifactDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + o.Payload = new(artifact_model.GooglerpcStatus) + + // response payload + if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} diff --git a/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/list_artifact_tasks_parameters.go b/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/list_artifact_tasks_parameters.go new file mode 100644 index 00000000000..9208d826b59 --- /dev/null +++ b/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/list_artifact_tasks_parameters.go @@ -0,0 +1,468 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package artifact_service + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "net/http" + "time" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" +) + +// NewListArtifactTasksParams creates a new ListArtifactTasksParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. +func NewListArtifactTasksParams() *ListArtifactTasksParams { + return &ListArtifactTasksParams{ + timeout: cr.DefaultTimeout, + } +} + +// NewListArtifactTasksParamsWithTimeout creates a new ListArtifactTasksParams object +// with the ability to set a timeout on a request. 
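
The reader in get_artifact_responses.go above maps HTTP 200 to *GetArtifactOK and every other status to *GetArtifactDefault, which is returned as an error. A minimal usage sketch follows; the package-level New constructor, NewGetArtifactParams, and WithArtifactID are the companion pieces go-swagger normally emits for this operation and are assumed here rather than taken from the diff, and the endpoint is a placeholder.

package main

import (
	"fmt"
	"log"

	httptransport "github.com/go-openapi/runtime/client"
	"github.com/go-openapi/strfmt"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/artifact_client/artifact_service"
)

func main() {
	// Placeholder host/base path; point these at a real API server.
	transport := httptransport.New("localhost:8888", "/", []string{"http"})
	client := artifact_service.New(transport, strfmt.Default)

	// NewGetArtifactParams / WithArtifactID come from the generated
	// *_parameters.go for this operation (assumed, not shown above).
	params := artifact_service.NewGetArtifactParams().WithArtifactID("123")

	ok, err := client.GetArtifact(params)
	if err != nil {
		// Non-200 responses surface as *GetArtifactDefault, which
		// implements error and carries a GooglerpcStatus payload.
		if def, isDef := err.(*artifact_service.GetArtifactDefault); isDef {
			log.Fatalf("rpc status %d: %s", def.Code(), def.GetPayload().Message)
		}
		log.Fatal(err)
	}
	fmt.Println(ok.GetPayload().Name)
}
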
+func NewListArtifactTasksParamsWithTimeout(timeout time.Duration) *ListArtifactTasksParams { + return &ListArtifactTasksParams{ + timeout: timeout, + } +} + +// NewListArtifactTasksParamsWithContext creates a new ListArtifactTasksParams object +// with the ability to set a context for a request. +func NewListArtifactTasksParamsWithContext(ctx context.Context) *ListArtifactTasksParams { + return &ListArtifactTasksParams{ + Context: ctx, + } +} + +// NewListArtifactTasksParamsWithHTTPClient creates a new ListArtifactTasksParams object +// with the ability to set a custom HTTPClient for a request. +func NewListArtifactTasksParamsWithHTTPClient(client *http.Client) *ListArtifactTasksParams { + return &ListArtifactTasksParams{ + HTTPClient: client, + } +} + +/* +ListArtifactTasksParams contains all the parameters to send to the API endpoint + + for the list artifact tasks operation. + + Typically these are written to a http.Request. +*/ +type ListArtifactTasksParams struct { + + /* ArtifactIds. + + Optional, filter artifact task by a set of artifact_ids + */ + ArtifactIds []string + + // Filter. + Filter *string + + // PageSize. + // + // Format: int32 + PageSize *int32 + + // PageToken. + PageToken *string + + /* RunIds. + + Optional, filter artifact task by a set of run_ids + */ + RunIds []string + + // SortBy. + SortBy *string + + /* TaskIds. + + Optional, filter artifact task by a set of task_ids + */ + TaskIds []string + + /* Type. + + Optional. Only list artifact tasks that have artifacts of this type. + + - UNSPECIFIED: For validation + - COMPONENT_DEFAULT_INPUT: This is used for inputs that are + provided via default parameters in + the component input definitions + - TASK_OUTPUT_INPUT: This is used for inputs that are + provided via upstream tasks. + In the sdk this appears as: + TaskInputsSpec.kind.task_output_parameter + & TaskInputsSpec.kind.task_output_artifact + - COMPONENT_INPUT: Used for inputs that are + passed from parent tasks. + - RUNTIME_VALUE_INPUT: Hardcoded values passed + as arguments to the task. + - COLLECTED_INPUTS: Used for dsl.Collected + Usage of this type indicates that all + Artifacts within the IOArtifact.artifacts + are inputs collected from sub tasks with + ITERATOR_OUTPUT outputs. + - ITERATOR_INPUT: In a for loop task, introduced via ParallelFor, this type + is used to indicate whether this resolved input belongs + to a parameterIterator or artifactIterator. + In such a case the "artifacts" field for IOArtifact.artifacts + is the list of resolved items for this parallelFor. + - ITERATOR_INPUT_RAW: Hardcoded iterator parameters. + Raw Iterator inputs have no producer + - ITERATOR_OUTPUT: When an output is produced by a Runtime Iteration Task + This value is use to differentiate between standard inputs + - OUTPUT: All other output types fall under this type. + - ONE_OF_OUTPUT: An output of a Conditions branch. + + Default: "UNSPECIFIED" + */ + Type *string + + timeout time.Duration + Context context.Context + HTTPClient *http.Client +} + +// WithDefaults hydrates default values in the list artifact tasks params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *ListArtifactTasksParams) WithDefaults() *ListArtifactTasksParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the list artifact tasks params (not the query body). +// +// All values with no default are reset to their zero value. 
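
One practical consequence of the doc comment above: SetDefaults builds a fresh params value that carries over only the timeout, context, and HTTP client, so any field set beforehand is reset to its zero value. A short sketch of the safe ordering, assuming only the generated API shown in this file:

package main

import (
	"fmt"

	"github.com/go-openapi/swag"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/artifact_client/artifact_service"
)

func main() {
	// Hydrate defaults first: Type becomes "UNSPECIFIED".
	p := artifact_service.NewListArtifactTasksParams().WithDefaults()
	p.SetPageSize(swag.Int32(10))
	fmt.Println(*p.Type, *p.PageSize) // UNSPECIFIED 10

	// Reversed order would wipe the page size again:
	// p.SetPageSize(swag.Int32(10)); p.SetDefaults() // PageSize is nil afterwards
}
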
+func (o *ListArtifactTasksParams) SetDefaults() { + var ( + typeVarDefault = string("UNSPECIFIED") + ) + + val := ListArtifactTasksParams{ + Type: &typeVarDefault, + } + + val.timeout = o.timeout + val.Context = o.Context + val.HTTPClient = o.HTTPClient + *o = val +} + +// WithTimeout adds the timeout to the list artifact tasks params +func (o *ListArtifactTasksParams) WithTimeout(timeout time.Duration) *ListArtifactTasksParams { + o.SetTimeout(timeout) + return o +} + +// SetTimeout adds the timeout to the list artifact tasks params +func (o *ListArtifactTasksParams) SetTimeout(timeout time.Duration) { + o.timeout = timeout +} + +// WithContext adds the context to the list artifact tasks params +func (o *ListArtifactTasksParams) WithContext(ctx context.Context) *ListArtifactTasksParams { + o.SetContext(ctx) + return o +} + +// SetContext adds the context to the list artifact tasks params +func (o *ListArtifactTasksParams) SetContext(ctx context.Context) { + o.Context = ctx +} + +// WithHTTPClient adds the HTTPClient to the list artifact tasks params +func (o *ListArtifactTasksParams) WithHTTPClient(client *http.Client) *ListArtifactTasksParams { + o.SetHTTPClient(client) + return o +} + +// SetHTTPClient adds the HTTPClient to the list artifact tasks params +func (o *ListArtifactTasksParams) SetHTTPClient(client *http.Client) { + o.HTTPClient = client +} + +// WithArtifactIds adds the artifactIds to the list artifact tasks params +func (o *ListArtifactTasksParams) WithArtifactIds(artifactIds []string) *ListArtifactTasksParams { + o.SetArtifactIds(artifactIds) + return o +} + +// SetArtifactIds adds the artifactIds to the list artifact tasks params +func (o *ListArtifactTasksParams) SetArtifactIds(artifactIds []string) { + o.ArtifactIds = artifactIds +} + +// WithFilter adds the filter to the list artifact tasks params +func (o *ListArtifactTasksParams) WithFilter(filter *string) *ListArtifactTasksParams { + o.SetFilter(filter) + return o +} + +// SetFilter adds the filter to the list artifact tasks params +func (o *ListArtifactTasksParams) SetFilter(filter *string) { + o.Filter = filter +} + +// WithPageSize adds the pageSize to the list artifact tasks params +func (o *ListArtifactTasksParams) WithPageSize(pageSize *int32) *ListArtifactTasksParams { + o.SetPageSize(pageSize) + return o +} + +// SetPageSize adds the pageSize to the list artifact tasks params +func (o *ListArtifactTasksParams) SetPageSize(pageSize *int32) { + o.PageSize = pageSize +} + +// WithPageToken adds the pageToken to the list artifact tasks params +func (o *ListArtifactTasksParams) WithPageToken(pageToken *string) *ListArtifactTasksParams { + o.SetPageToken(pageToken) + return o +} + +// SetPageToken adds the pageToken to the list artifact tasks params +func (o *ListArtifactTasksParams) SetPageToken(pageToken *string) { + o.PageToken = pageToken +} + +// WithRunIds adds the runIds to the list artifact tasks params +func (o *ListArtifactTasksParams) WithRunIds(runIds []string) *ListArtifactTasksParams { + o.SetRunIds(runIds) + return o +} + +// SetRunIds adds the runIds to the list artifact tasks params +func (o *ListArtifactTasksParams) SetRunIds(runIds []string) { + o.RunIds = runIds +} + +// WithSortBy adds the sortBy to the list artifact tasks params +func (o *ListArtifactTasksParams) WithSortBy(sortBy *string) *ListArtifactTasksParams { + o.SetSortBy(sortBy) + return o +} + +// SetSortBy adds the sortBy to the list artifact tasks params +func (o *ListArtifactTasksParams) SetSortBy(sortBy *string) { + o.SortBy = 
sortBy +} + +// WithTaskIds adds the taskIds to the list artifact tasks params +func (o *ListArtifactTasksParams) WithTaskIds(taskIds []string) *ListArtifactTasksParams { + o.SetTaskIds(taskIds) + return o +} + +// SetTaskIds adds the taskIds to the list artifact tasks params +func (o *ListArtifactTasksParams) SetTaskIds(taskIds []string) { + o.TaskIds = taskIds +} + +// WithType adds the typeVar to the list artifact tasks params +func (o *ListArtifactTasksParams) WithType(typeVar *string) *ListArtifactTasksParams { + o.SetType(typeVar) + return o +} + +// SetType adds the type to the list artifact tasks params +func (o *ListArtifactTasksParams) SetType(typeVar *string) { + o.Type = typeVar +} + +// WriteToRequest writes these params to a swagger request +func (o *ListArtifactTasksParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { + + if err := r.SetTimeout(o.timeout); err != nil { + return err + } + var res []error + + if o.ArtifactIds != nil { + + // binding items for artifact_ids + joinedArtifactIds := o.bindParamArtifactIds(reg) + + // query array param artifact_ids + if err := r.SetQueryParam("artifact_ids", joinedArtifactIds...); err != nil { + return err + } + } + + if o.Filter != nil { + + // query param filter + var qrFilter string + + if o.Filter != nil { + qrFilter = *o.Filter + } + qFilter := qrFilter + if qFilter != "" { + + if err := r.SetQueryParam("filter", qFilter); err != nil { + return err + } + } + } + + if o.PageSize != nil { + + // query param page_size + var qrPageSize int32 + + if o.PageSize != nil { + qrPageSize = *o.PageSize + } + qPageSize := swag.FormatInt32(qrPageSize) + if qPageSize != "" { + + if err := r.SetQueryParam("page_size", qPageSize); err != nil { + return err + } + } + } + + if o.PageToken != nil { + + // query param page_token + var qrPageToken string + + if o.PageToken != nil { + qrPageToken = *o.PageToken + } + qPageToken := qrPageToken + if qPageToken != "" { + + if err := r.SetQueryParam("page_token", qPageToken); err != nil { + return err + } + } + } + + if o.RunIds != nil { + + // binding items for run_ids + joinedRunIds := o.bindParamRunIds(reg) + + // query array param run_ids + if err := r.SetQueryParam("run_ids", joinedRunIds...); err != nil { + return err + } + } + + if o.SortBy != nil { + + // query param sort_by + var qrSortBy string + + if o.SortBy != nil { + qrSortBy = *o.SortBy + } + qSortBy := qrSortBy + if qSortBy != "" { + + if err := r.SetQueryParam("sort_by", qSortBy); err != nil { + return err + } + } + } + + if o.TaskIds != nil { + + // binding items for task_ids + joinedTaskIds := o.bindParamTaskIds(reg) + + // query array param task_ids + if err := r.SetQueryParam("task_ids", joinedTaskIds...); err != nil { + return err + } + } + + if o.Type != nil { + + // query param type + var qrType string + + if o.Type != nil { + qrType = *o.Type + } + qType := qrType + if qType != "" { + + if err := r.SetQueryParam("type", qType); err != nil { + return err + } + } + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} + +// bindParamListArtifactTasks binds the parameter artifact_ids +func (o *ListArtifactTasksParams) bindParamArtifactIds(formats strfmt.Registry) []string { + artifactIdsIR := o.ArtifactIds + + var artifactIdsIC []string + for _, artifactIdsIIR := range artifactIdsIR { // explode []string + + artifactIdsIIV := artifactIdsIIR // string as string + artifactIdsIC = append(artifactIdsIC, artifactIdsIIV) + } + + // items.CollectionFormat: "multi" + artifactIdsIS := swag.JoinByFormat(artifactIdsIC, "multi") + + return artifactIdsIS +} + +// bindParamListArtifactTasks binds the parameter run_ids +func (o *ListArtifactTasksParams) bindParamRunIds(formats strfmt.Registry) []string { + runIdsIR := o.RunIds + + var runIdsIC []string + for _, runIdsIIR := range runIdsIR { // explode []string + + runIdsIIV := runIdsIIR // string as string + runIdsIC = append(runIdsIC, runIdsIIV) + } + + // items.CollectionFormat: "multi" + runIdsIS := swag.JoinByFormat(runIdsIC, "multi") + + return runIdsIS +} + +// bindParamListArtifactTasks binds the parameter task_ids +func (o *ListArtifactTasksParams) bindParamTaskIds(formats strfmt.Registry) []string { + taskIdsIR := o.TaskIds + + var taskIdsIC []string + for _, taskIdsIIR := range taskIdsIR { // explode []string + + taskIdsIIV := taskIdsIIR // string as string + taskIdsIC = append(taskIdsIC, taskIdsIIV) + } + + // items.CollectionFormat: "multi" + taskIdsIS := swag.JoinByFormat(taskIdsIC, "multi") + + return taskIdsIS +} diff --git a/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/list_artifact_tasks_responses.go b/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/list_artifact_tasks_responses.go new file mode 100644 index 00000000000..a5b6b595556 --- /dev/null +++ b/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/list_artifact_tasks_responses.go @@ -0,0 +1,187 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package artifact_service + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" + + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/artifact_model" +) + +// ListArtifactTasksReader is a Reader for the ListArtifactTasks structure. +type ListArtifactTasksReader struct { + formats strfmt.Registry +} + +// ReadResponse reads a server response into the received o. +func (o *ListArtifactTasksReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + switch response.Code() { + case 200: + result := NewListArtifactTasksOK() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return result, nil + default: + result := NewListArtifactTasksDefault(response.Code()) + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + if response.Code()/100 == 2 { + return result, nil + } + return nil, result + } +} + +// NewListArtifactTasksOK creates a ListArtifactTasksOK with default headers values +func NewListArtifactTasksOK() *ListArtifactTasksOK { + return &ListArtifactTasksOK{} +} + +/* +ListArtifactTasksOK describes a response with status code 200, with default header values. + +A successful response. 
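
The bindParam helpers above use collectionFormat "multi", so each id is sent as its own repeated query parameter (artifact_ids=a1&artifact_ids=a2) rather than one comma-joined value. A small illustration of the difference, using the same swag helper the generated code calls:

package main

import (
	"fmt"

	"github.com/go-openapi/swag"
)

func main() {
	ids := []string{"a1", "a2"}
	// "multi" keeps items separate; SetQueryParam then emits one
	// artifact_ids=<id> pair per element.
	fmt.Println(swag.JoinByFormat(ids, "multi")) // [a1 a2]
	// "csv" (the swagger default) would join them into one value.
	fmt.Println(swag.JoinByFormat(ids, "csv")) // [a1,a2]
}
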
+*/ +type ListArtifactTasksOK struct { + Payload *artifact_model.V2beta1ListArtifactTasksResponse +} + +// IsSuccess returns true when this list artifact tasks o k response has a 2xx status code +func (o *ListArtifactTasksOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this list artifact tasks o k response has a 3xx status code +func (o *ListArtifactTasksOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this list artifact tasks o k response has a 4xx status code +func (o *ListArtifactTasksOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this list artifact tasks o k response has a 5xx status code +func (o *ListArtifactTasksOK) IsServerError() bool { + return false +} + +// IsCode returns true when this list artifact tasks o k response a status code equal to that given +func (o *ListArtifactTasksOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the list artifact tasks o k response +func (o *ListArtifactTasksOK) Code() int { + return 200 +} + +func (o *ListArtifactTasksOK) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/artifact_tasks][%d] listArtifactTasksOK %s", 200, payload) +} + +func (o *ListArtifactTasksOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/artifact_tasks][%d] listArtifactTasksOK %s", 200, payload) +} + +func (o *ListArtifactTasksOK) GetPayload() *artifact_model.V2beta1ListArtifactTasksResponse { + return o.Payload +} + +func (o *ListArtifactTasksOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + o.Payload = new(artifact_model.V2beta1ListArtifactTasksResponse) + + // response payload + if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} + +// NewListArtifactTasksDefault creates a ListArtifactTasksDefault with default headers values +func NewListArtifactTasksDefault(code int) *ListArtifactTasksDefault { + return &ListArtifactTasksDefault{ + _statusCode: code, + } +} + +/* +ListArtifactTasksDefault describes a response with status code -1, with default header values. + +An unexpected error response. 
+*/ +type ListArtifactTasksDefault struct { + _statusCode int + + Payload *artifact_model.GooglerpcStatus +} + +// IsSuccess returns true when this list artifact tasks default response has a 2xx status code +func (o *ListArtifactTasksDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this list artifact tasks default response has a 3xx status code +func (o *ListArtifactTasksDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this list artifact tasks default response has a 4xx status code +func (o *ListArtifactTasksDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this list artifact tasks default response has a 5xx status code +func (o *ListArtifactTasksDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this list artifact tasks default response a status code equal to that given +func (o *ListArtifactTasksDefault) IsCode(code int) bool { + return o._statusCode == code +} + +// Code gets the status code for the list artifact tasks default response +func (o *ListArtifactTasksDefault) Code() int { + return o._statusCode +} + +func (o *ListArtifactTasksDefault) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/artifact_tasks][%d] list_artifact_tasks default %s", o._statusCode, payload) +} + +func (o *ListArtifactTasksDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/artifact_tasks][%d] list_artifact_tasks default %s", o._statusCode, payload) +} + +func (o *ListArtifactTasksDefault) GetPayload() *artifact_model.GooglerpcStatus { + return o.Payload +} + +func (o *ListArtifactTasksDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + o.Payload = new(artifact_model.GooglerpcStatus) + + // response payload + if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} diff --git a/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/list_artifacts_parameters.go b/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/list_artifacts_parameters.go new file mode 100644 index 00000000000..a480ba3b1a2 --- /dev/null +++ b/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/list_artifacts_parameters.go @@ -0,0 +1,306 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package artifact_service + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "net/http" + "time" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" +) + +// NewListArtifactsParams creates a new ListArtifactsParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. +func NewListArtifactsParams() *ListArtifactsParams { + return &ListArtifactsParams{ + timeout: cr.DefaultTimeout, + } +} + +// NewListArtifactsParamsWithTimeout creates a new ListArtifactsParams object +// with the ability to set a timeout on a request. 
+func NewListArtifactsParamsWithTimeout(timeout time.Duration) *ListArtifactsParams { + return &ListArtifactsParams{ + timeout: timeout, + } +} + +// NewListArtifactsParamsWithContext creates a new ListArtifactsParams object +// with the ability to set a context for a request. +func NewListArtifactsParamsWithContext(ctx context.Context) *ListArtifactsParams { + return &ListArtifactsParams{ + Context: ctx, + } +} + +// NewListArtifactsParamsWithHTTPClient creates a new ListArtifactsParams object +// with the ability to set a custom HTTPClient for a request. +func NewListArtifactsParamsWithHTTPClient(client *http.Client) *ListArtifactsParams { + return &ListArtifactsParams{ + HTTPClient: client, + } +} + +/* +ListArtifactsParams contains all the parameters to send to the API endpoint + + for the list artifacts operation. + + Typically these are written to a http.Request. +*/ +type ListArtifactsParams struct { + + /* Filter. + + A url-encoded, JSON-serialized filter protocol buffer (see + [filter.proto](https://github.com/kubeflow/artifacts/blob/master/backend/api/filter.proto)). + */ + Filter *string + + /* Namespace. + + Optional input. Namespace for the artifacts. + */ + Namespace *string + + /* PageSize. + + The number of artifacts to be listed per page. If there are more artifacts + than this number, the response message will contain a valid value in the + nextPageToken field. + + Format: int32 + */ + PageSize *int32 + + /* PageToken. + + A page token to request the results page. + */ + PageToken *string + + /* SortBy. + + Sorting order in form of "field_name", "field_name asc" or "field_name desc". + Ascending by default. + */ + SortBy *string + + timeout time.Duration + Context context.Context + HTTPClient *http.Client +} + +// WithDefaults hydrates default values in the list artifacts params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *ListArtifactsParams) WithDefaults() *ListArtifactsParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the list artifacts params (not the query body). +// +// All values with no default are reset to their zero value. 
+func (o *ListArtifactsParams) SetDefaults() { + // no default values defined for this parameter +} + +// WithTimeout adds the timeout to the list artifacts params +func (o *ListArtifactsParams) WithTimeout(timeout time.Duration) *ListArtifactsParams { + o.SetTimeout(timeout) + return o +} + +// SetTimeout adds the timeout to the list artifacts params +func (o *ListArtifactsParams) SetTimeout(timeout time.Duration) { + o.timeout = timeout +} + +// WithContext adds the context to the list artifacts params +func (o *ListArtifactsParams) WithContext(ctx context.Context) *ListArtifactsParams { + o.SetContext(ctx) + return o +} + +// SetContext adds the context to the list artifacts params +func (o *ListArtifactsParams) SetContext(ctx context.Context) { + o.Context = ctx +} + +// WithHTTPClient adds the HTTPClient to the list artifacts params +func (o *ListArtifactsParams) WithHTTPClient(client *http.Client) *ListArtifactsParams { + o.SetHTTPClient(client) + return o +} + +// SetHTTPClient adds the HTTPClient to the list artifacts params +func (o *ListArtifactsParams) SetHTTPClient(client *http.Client) { + o.HTTPClient = client +} + +// WithFilter adds the filter to the list artifacts params +func (o *ListArtifactsParams) WithFilter(filter *string) *ListArtifactsParams { + o.SetFilter(filter) + return o +} + +// SetFilter adds the filter to the list artifacts params +func (o *ListArtifactsParams) SetFilter(filter *string) { + o.Filter = filter +} + +// WithNamespace adds the namespace to the list artifacts params +func (o *ListArtifactsParams) WithNamespace(namespace *string) *ListArtifactsParams { + o.SetNamespace(namespace) + return o +} + +// SetNamespace adds the namespace to the list artifacts params +func (o *ListArtifactsParams) SetNamespace(namespace *string) { + o.Namespace = namespace +} + +// WithPageSize adds the pageSize to the list artifacts params +func (o *ListArtifactsParams) WithPageSize(pageSize *int32) *ListArtifactsParams { + o.SetPageSize(pageSize) + return o +} + +// SetPageSize adds the pageSize to the list artifacts params +func (o *ListArtifactsParams) SetPageSize(pageSize *int32) { + o.PageSize = pageSize +} + +// WithPageToken adds the pageToken to the list artifacts params +func (o *ListArtifactsParams) WithPageToken(pageToken *string) *ListArtifactsParams { + o.SetPageToken(pageToken) + return o +} + +// SetPageToken adds the pageToken to the list artifacts params +func (o *ListArtifactsParams) SetPageToken(pageToken *string) { + o.PageToken = pageToken +} + +// WithSortBy adds the sortBy to the list artifacts params +func (o *ListArtifactsParams) WithSortBy(sortBy *string) *ListArtifactsParams { + o.SetSortBy(sortBy) + return o +} + +// SetSortBy adds the sortBy to the list artifacts params +func (o *ListArtifactsParams) SetSortBy(sortBy *string) { + o.SortBy = sortBy +} + +// WriteToRequest writes these params to a swagger request +func (o *ListArtifactsParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { + + if err := r.SetTimeout(o.timeout); err != nil { + return err + } + var res []error + + if o.Filter != nil { + + // query param filter + var qrFilter string + + if o.Filter != nil { + qrFilter = *o.Filter + } + qFilter := qrFilter + if qFilter != "" { + + if err := r.SetQueryParam("filter", qFilter); err != nil { + return err + } + } + } + + if o.Namespace != nil { + + // query param namespace + var qrNamespace string + + if o.Namespace != nil { + qrNamespace = *o.Namespace + } + qNamespace := qrNamespace + if qNamespace != "" { + + if 
err := r.SetQueryParam("namespace", qNamespace); err != nil { + return err + } + } + } + + if o.PageSize != nil { + + // query param page_size + var qrPageSize int32 + + if o.PageSize != nil { + qrPageSize = *o.PageSize + } + qPageSize := swag.FormatInt32(qrPageSize) + if qPageSize != "" { + + if err := r.SetQueryParam("page_size", qPageSize); err != nil { + return err + } + } + } + + if o.PageToken != nil { + + // query param page_token + var qrPageToken string + + if o.PageToken != nil { + qrPageToken = *o.PageToken + } + qPageToken := qrPageToken + if qPageToken != "" { + + if err := r.SetQueryParam("page_token", qPageToken); err != nil { + return err + } + } + } + + if o.SortBy != nil { + + // query param sort_by + var qrSortBy string + + if o.SortBy != nil { + qrSortBy = *o.SortBy + } + qSortBy := qrSortBy + if qSortBy != "" { + + if err := r.SetQueryParam("sort_by", qSortBy); err != nil { + return err + } + } + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} diff --git a/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/list_artifacts_responses.go b/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/list_artifacts_responses.go new file mode 100644 index 00000000000..f4d5919fee5 --- /dev/null +++ b/backend/api/v2beta1/go_http_client/artifact_client/artifact_service/list_artifacts_responses.go @@ -0,0 +1,187 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package artifact_service + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" + + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/artifact_model" +) + +// ListArtifactsReader is a Reader for the ListArtifacts structure. +type ListArtifactsReader struct { + formats strfmt.Registry +} + +// ReadResponse reads a server response into the received o. +func (o *ListArtifactsReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + switch response.Code() { + case 200: + result := NewListArtifactsOK() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return result, nil + default: + result := NewListArtifactsDefault(response.Code()) + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + if response.Code()/100 == 2 { + return result, nil + } + return nil, result + } +} + +// NewListArtifactsOK creates a ListArtifactsOK with default headers values +func NewListArtifactsOK() *ListArtifactsOK { + return &ListArtifactsOK{} +} + +/* +ListArtifactsOK describes a response with status code 200, with default header values. + +A successful response. 
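
Putting the list-artifacts parameters together: sort_by follows the documented "field_name", "field_name asc", or "field_name desc" form, and filter must be a url-encoded, JSON-serialized filter proto as the doc comment above notes. In this sketch, "created_at" as a sortable field is an assumption based on the artifact model later in this diff.

package main

import (
	"fmt"

	"github.com/go-openapi/swag"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/artifact_client/artifact_service"
)

func main() {
	params := artifact_service.NewListArtifactsParams().
		WithNamespace(swag.String("kubeflow")).
		WithPageSize(swag.Int32(20)).
		// "created_at desc" follows the documented sort_by form; the
		// exact set of sortable field names is an assumption here.
		WithSortBy(swag.String("created_at desc"))

	fmt.Println(*params.Namespace, *params.PageSize, *params.SortBy)
}
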
+*/ +type ListArtifactsOK struct { + Payload *artifact_model.V2beta1ListArtifactResponse +} + +// IsSuccess returns true when this list artifacts o k response has a 2xx status code +func (o *ListArtifactsOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this list artifacts o k response has a 3xx status code +func (o *ListArtifactsOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this list artifacts o k response has a 4xx status code +func (o *ListArtifactsOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this list artifacts o k response has a 5xx status code +func (o *ListArtifactsOK) IsServerError() bool { + return false +} + +// IsCode returns true when this list artifacts o k response a status code equal to that given +func (o *ListArtifactsOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the list artifacts o k response +func (o *ListArtifactsOK) Code() int { + return 200 +} + +func (o *ListArtifactsOK) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/artifacts][%d] listArtifactsOK %s", 200, payload) +} + +func (o *ListArtifactsOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/artifacts][%d] listArtifactsOK %s", 200, payload) +} + +func (o *ListArtifactsOK) GetPayload() *artifact_model.V2beta1ListArtifactResponse { + return o.Payload +} + +func (o *ListArtifactsOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + o.Payload = new(artifact_model.V2beta1ListArtifactResponse) + + // response payload + if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} + +// NewListArtifactsDefault creates a ListArtifactsDefault with default headers values +func NewListArtifactsDefault(code int) *ListArtifactsDefault { + return &ListArtifactsDefault{ + _statusCode: code, + } +} + +/* +ListArtifactsDefault describes a response with status code -1, with default header values. + +An unexpected error response. 
+*/ +type ListArtifactsDefault struct { + _statusCode int + + Payload *artifact_model.GooglerpcStatus +} + +// IsSuccess returns true when this list artifacts default response has a 2xx status code +func (o *ListArtifactsDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this list artifacts default response has a 3xx status code +func (o *ListArtifactsDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this list artifacts default response has a 4xx status code +func (o *ListArtifactsDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this list artifacts default response has a 5xx status code +func (o *ListArtifactsDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this list artifacts default response a status code equal to that given +func (o *ListArtifactsDefault) IsCode(code int) bool { + return o._statusCode == code +} + +// Code gets the status code for the list artifacts default response +func (o *ListArtifactsDefault) Code() int { + return o._statusCode +} + +func (o *ListArtifactsDefault) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/artifacts][%d] list_artifacts default %s", o._statusCode, payload) +} + +func (o *ListArtifactsDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/artifacts][%d] list_artifacts default %s", o._statusCode, payload) +} + +func (o *ListArtifactsDefault) GetPayload() *artifact_model.GooglerpcStatus { + return o.Payload +} + +func (o *ListArtifactsDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + o.Payload = new(artifact_model.GooglerpcStatus) + + // response payload + if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} diff --git a/backend/api/v2beta1/go_http_client/artifact_model/artifact_artifact_type.go b/backend/api/v2beta1/go_http_client/artifact_model/artifact_artifact_type.go new file mode 100644 index 00000000000..1e76048cfad --- /dev/null +++ b/backend/api/v2beta1/go_http_client/artifact_model/artifact_artifact_type.go @@ -0,0 +1,100 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package artifact_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "encoding/json" + + "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/validate" +) + +// ArtifactArtifactType - TYPE_UNSPECIFIED: default; treated as "not set" +// reject if unset. +// +// swagger:model ArtifactArtifactType +type ArtifactArtifactType string + +func NewArtifactArtifactType(value ArtifactArtifactType) *ArtifactArtifactType { + return &value +} + +// Pointer returns a pointer to a freshly-allocated ArtifactArtifactType. 
+func (m ArtifactArtifactType) Pointer() *ArtifactArtifactType { + return &m +} + +const ( + + // ArtifactArtifactTypeTYPEUNSPECIFIED captures enum value "TYPE_UNSPECIFIED" + ArtifactArtifactTypeTYPEUNSPECIFIED ArtifactArtifactType = "TYPE_UNSPECIFIED" + + // ArtifactArtifactTypeArtifact captures enum value "Artifact" + ArtifactArtifactTypeArtifact ArtifactArtifactType = "Artifact" + + // ArtifactArtifactTypeModel captures enum value "Model" + ArtifactArtifactTypeModel ArtifactArtifactType = "Model" + + // ArtifactArtifactTypeDataset captures enum value "Dataset" + ArtifactArtifactTypeDataset ArtifactArtifactType = "Dataset" + + // ArtifactArtifactTypeHTML captures enum value "HTML" + ArtifactArtifactTypeHTML ArtifactArtifactType = "HTML" + + // ArtifactArtifactTypeMarkdown captures enum value "Markdown" + ArtifactArtifactTypeMarkdown ArtifactArtifactType = "Markdown" + + // ArtifactArtifactTypeMetric captures enum value "Metric" + ArtifactArtifactTypeMetric ArtifactArtifactType = "Metric" + + // ArtifactArtifactTypeClassificationMetric captures enum value "ClassificationMetric" + ArtifactArtifactTypeClassificationMetric ArtifactArtifactType = "ClassificationMetric" + + // ArtifactArtifactTypeSlicedClassificationMetric captures enum value "SlicedClassificationMetric" + ArtifactArtifactTypeSlicedClassificationMetric ArtifactArtifactType = "SlicedClassificationMetric" +) + +// for schema +var artifactArtifactTypeEnum []interface{} + +func init() { + var res []ArtifactArtifactType + if err := json.Unmarshal([]byte(`["TYPE_UNSPECIFIED","Artifact","Model","Dataset","HTML","Markdown","Metric","ClassificationMetric","SlicedClassificationMetric"]`), &res); err != nil { + panic(err) + } + for _, v := range res { + artifactArtifactTypeEnum = append(artifactArtifactTypeEnum, v) + } +} + +func (m ArtifactArtifactType) validateArtifactArtifactTypeEnum(path, location string, value ArtifactArtifactType) error { + if err := validate.EnumCase(path, location, value, artifactArtifactTypeEnum, true); err != nil { + return err + } + return nil +} + +// Validate validates this artifact artifact type +func (m ArtifactArtifactType) Validate(formats strfmt.Registry) error { + var res []error + + // value enum + if err := m.validateArtifactArtifactTypeEnum("", "body", m); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +// ContextValidate validates this artifact artifact type based on context it is used +func (m ArtifactArtifactType) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} diff --git a/backend/api/v2beta1/go_http_client/artifact_model/googlerpc_status.go b/backend/api/v2beta1/go_http_client/artifact_model/googlerpc_status.go new file mode 100644 index 00000000000..046bd563a51 --- /dev/null +++ b/backend/api/v2beta1/go_http_client/artifact_model/googlerpc_status.go @@ -0,0 +1,136 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package artifact_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "strconv" + + "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" +) + +// GooglerpcStatus The `Status` type defines a logical error model that is suitable for +// different programming environments, including REST APIs and RPC APIs. It is +// used by [gRPC](https://github.com/grpc). 
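
The enum type above validates against the list embedded at init time, so unknown type strings are rejected client-side before a request is ever sent. A quick sketch, using only the generated model package:

package main

import (
	"fmt"

	"github.com/go-openapi/strfmt"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/artifact_model"
)

func main() {
	ok := artifact_model.ArtifactArtifactTypeDataset
	fmt.Println(ok.Validate(strfmt.Default)) // <nil>: "Dataset" is in the enum

	bad := artifact_model.ArtifactArtifactType("Tensor")
	fmt.Println(bad.Validate(strfmt.Default)) // enum validation error
}
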
Each `Status` message contains +// three pieces of data: error code, error message, and error details. +// +// You can find out more about this error model and how to work with it in the +// [API Design Guide](https://cloud.google.com/apis/design/errors). +// +// swagger:model googlerpcStatus +type GooglerpcStatus struct { + + // The status code, which should be an enum value of [google.rpc.Code][google.rpc.Code]. + Code int32 `json:"code,omitempty"` + + // A list of messages that carry the error details. There is a common set of + // message types for APIs to use. + Details []*ProtobufAny `json:"details"` + + // A developer-facing error message, which should be in English. Any + // user-facing error message should be localized and sent in the + // [google.rpc.Status.details][google.rpc.Status.details] field, or localized by the client. + Message string `json:"message,omitempty"` +} + +// Validate validates this googlerpc status +func (m *GooglerpcStatus) Validate(formats strfmt.Registry) error { + var res []error + + if err := m.validateDetails(formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *GooglerpcStatus) validateDetails(formats strfmt.Registry) error { + if swag.IsZero(m.Details) { // not required + return nil + } + + for i := 0; i < len(m.Details); i++ { + if swag.IsZero(m.Details[i]) { // not required + continue + } + + if m.Details[i] != nil { + if err := m.Details[i].Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("details" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("details" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this googlerpc status based on the context it is used +func (m *GooglerpcStatus) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateDetails(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *GooglerpcStatus) contextValidateDetails(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.Details); i++ { + + if m.Details[i] != nil { + + if swag.IsZero(m.Details[i]) { // not required + return nil + } + + if err := m.Details[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("details" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("details" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// MarshalBinary interface implementation +func (m *GooglerpcStatus) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *GooglerpcStatus) UnmarshalBinary(b []byte) error { + var res GooglerpcStatus + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/v2beta1/go_http_client/artifact_model/protobuf_any.go b/backend/api/v2beta1/go_http_client/artifact_model/protobuf_any.go new file mode 100644 index 00000000000..e871d716d61 --- /dev/null +++ b/backend/api/v2beta1/go_http_client/artifact_model/protobuf_any.go @@ -0,0 +1,292 @@ +// Code generated by go-swagger; DO NOT EDIT. 
+
+package artifact_model
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the swagger generate command
+
+import (
+	"context"
+	"encoding/json"
+
+	"github.com/go-openapi/strfmt"
+	"github.com/go-openapi/swag"
+)
+
+// ProtobufAny `Any` contains an arbitrary serialized protocol buffer message along with a
+// URL that describes the type of the serialized message.
+//
+// Protobuf library provides support to pack/unpack Any values in the form
+// of utility functions or additional generated methods of the Any type.
+//
+// Example 1: Pack and unpack a message in C++.
+//
+//	Foo foo = ...;
+//	Any any;
+//	any.PackFrom(foo);
+//	...
+//	if (any.UnpackTo(&foo)) {
+//	  ...
+//	}
+//
+// Example 2: Pack and unpack a message in Java.
+//
+//	Foo foo = ...;
+//	Any any = Any.pack(foo);
+//	...
+//	if (any.is(Foo.class)) {
+//	  foo = any.unpack(Foo.class);
+//	}
+//	// or ...
+//	if (any.isSameTypeAs(Foo.getDefaultInstance())) {
+//	  foo = any.unpack(Foo.getDefaultInstance());
+//	}
+//
+// Example 3: Pack and unpack a message in Python.
+//
+//	foo = Foo(...)
+//	any = Any()
+//	any.Pack(foo)
+//	...
+//	if any.Is(Foo.DESCRIPTOR):
+//	  any.Unpack(foo)
+//	  ...
+//
+// Example 4: Pack and unpack a message in Go
+//
+//	foo := &pb.Foo{...}
+//	any, err := anypb.New(foo)
+//	if err != nil {
+//	  ...
+//	}
+//	...
+//	foo := &pb.Foo{}
+//	if err := any.UnmarshalTo(foo); err != nil {
+//	  ...
+//	}
+//
+// The pack methods provided by protobuf library will by default use
+// 'type.googleapis.com/full.type.name' as the type URL and the unpack
+// methods only use the fully qualified type name after the last '/'
+// in the type URL, for example "foo.bar.com/x/y.z" will yield type
+// name "y.z".
+//
+// JSON
+// ====
+// The JSON representation of an `Any` value uses the regular
+// representation of the deserialized, embedded message, with an
+// additional field `@type` which contains the type URL. Example:
+//
+//	package google.profile;
+//	message Person {
+//	  string first_name = 1;
+//	  string last_name = 2;
+//	}
+//
+//	{
+//	  "@type": "type.googleapis.com/google.profile.Person",
+//	  "firstName": <string>,
+//	  "lastName": <string>
+//	}
+//
+// If the embedded message type is well-known and has a custom JSON
+// representation, that representation will be embedded adding a field
+// `value` which holds the custom JSON in addition to the `@type`
+// field. Example (for message [google.protobuf.Duration][]):
+//
+//	{
+//	  "@type": "type.googleapis.com/google.protobuf.Duration",
+//	  "value": "1.212s"
+//	}
+//
+// swagger:model protobufAny
+type ProtobufAny struct {
+
+	// A URL/resource name that uniquely identifies the type of the serialized
+	// protocol buffer message. This string must contain at least
+	// one "/" character. The last segment of the URL's path must represent
+	// the fully qualified name of the type (as in
+	// `path/google.protobuf.Duration`). The name should be in a canonical form
+	// (e.g., leading "." is not accepted).
+	//
+	// In practice, teams usually precompile into the binary all types that they
+	// expect it to use in the context of Any. However, for URLs which use the
+	// scheme `http`, `https`, or no scheme, one can optionally set up a type
+	// server that maps type URLs to message definitions as follows:
+	//
+	// * If no scheme is provided, `https` is assumed.
+	// * An HTTP GET on the URL must yield a [google.protobuf.Type][]
+	//   value in binary format, or produce an error.
+ // * Applications are allowed to cache lookup results based on the + // URL, or have them precompiled into a binary to avoid any + // lookup. Therefore, binary compatibility needs to be preserved + // on changes to types. (Use versioned type names to manage + // breaking changes.) + // + // Note: this functionality is not currently available in the official + // protobuf release, and it is not used for type URLs beginning with + // type.googleapis.com. As of May 2023, there are no widely used type server + // implementations and no plans to implement one. + // + // Schemes other than `http`, `https` (or the empty scheme) might be + // used with implementation specific semantics. + AtType string `json:"@type,omitempty"` + + // protobuf any + ProtobufAny map[string]interface{} `json:"-"` +} + +// UnmarshalJSON unmarshals this object with additional properties from JSON +func (m *ProtobufAny) UnmarshalJSON(data []byte) error { + // stage 1, bind the properties + var stage1 struct { + + // A URL/resource name that uniquely identifies the type of the serialized + // protocol buffer message. This string must contain at least + // one "/" character. The last segment of the URL's path must represent + // the fully qualified name of the type (as in + // `path/google.protobuf.Duration`). The name should be in a canonical form + // (e.g., leading "." is not accepted). + // + // In practice, teams usually precompile into the binary all types that they + // expect it to use in the context of Any. However, for URLs which use the + // scheme `http`, `https`, or no scheme, one can optionally set up a type + // server that maps type URLs to message definitions as follows: + // + // * If no scheme is provided, `https` is assumed. + // * An HTTP GET on the URL must yield a [google.protobuf.Type][] + // value in binary format, or produce an error. + // * Applications are allowed to cache lookup results based on the + // URL, or have them precompiled into a binary to avoid any + // lookup. Therefore, binary compatibility needs to be preserved + // on changes to types. (Use versioned type names to manage + // breaking changes.) + // + // Note: this functionality is not currently available in the official + // protobuf release, and it is not used for type URLs beginning with + // type.googleapis.com. As of May 2023, there are no widely used type server + // implementations and no plans to implement one. + // + // Schemes other than `http`, `https` (or the empty scheme) might be + // used with implementation specific semantics. + AtType string `json:"@type,omitempty"` + } + if err := json.Unmarshal(data, &stage1); err != nil { + return err + } + var rcv ProtobufAny + + rcv.AtType = stage1.AtType + *m = rcv + + // stage 2, remove properties and add to map + stage2 := make(map[string]json.RawMessage) + if err := json.Unmarshal(data, &stage2); err != nil { + return err + } + + delete(stage2, "@type") + // stage 3, add additional properties values + if len(stage2) > 0 { + result := make(map[string]interface{}) + for k, v := range stage2 { + var toadd interface{} + if err := json.Unmarshal(v, &toadd); err != nil { + return err + } + result[k] = toadd + } + m.ProtobufAny = result + } + + return nil +} + +// MarshalJSON marshals this object with additional properties into a JSON object +func (m ProtobufAny) MarshalJSON() ([]byte, error) { + var stage1 struct { + + // A URL/resource name that uniquely identifies the type of the serialized + // protocol buffer message. 
This string must contain at least + // one "/" character. The last segment of the URL's path must represent + // the fully qualified name of the type (as in + // `path/google.protobuf.Duration`). The name should be in a canonical form + // (e.g., leading "." is not accepted). + // + // In practice, teams usually precompile into the binary all types that they + // expect it to use in the context of Any. However, for URLs which use the + // scheme `http`, `https`, or no scheme, one can optionally set up a type + // server that maps type URLs to message definitions as follows: + // + // * If no scheme is provided, `https` is assumed. + // * An HTTP GET on the URL must yield a [google.protobuf.Type][] + // value in binary format, or produce an error. + // * Applications are allowed to cache lookup results based on the + // URL, or have them precompiled into a binary to avoid any + // lookup. Therefore, binary compatibility needs to be preserved + // on changes to types. (Use versioned type names to manage + // breaking changes.) + // + // Note: this functionality is not currently available in the official + // protobuf release, and it is not used for type URLs beginning with + // type.googleapis.com. As of May 2023, there are no widely used type server + // implementations and no plans to implement one. + // + // Schemes other than `http`, `https` (or the empty scheme) might be + // used with implementation specific semantics. + AtType string `json:"@type,omitempty"` + } + + stage1.AtType = m.AtType + + // make JSON object for known properties + props, err := json.Marshal(stage1) + if err != nil { + return nil, err + } + + if len(m.ProtobufAny) == 0 { // no additional properties + return props, nil + } + + // make JSON object for the additional properties + additional, err := json.Marshal(m.ProtobufAny) + if err != nil { + return nil, err + } + + if len(props) < 3 { // "{}": only additional properties + return additional, nil + } + + // concatenate the 2 objects + return swag.ConcatJSON(props, additional), nil +} + +// Validate validates this protobuf any +func (m *ProtobufAny) Validate(formats strfmt.Registry) error { + return nil +} + +// ContextValidate validates this protobuf any based on context it is used +func (m *ProtobufAny) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} + +// MarshalBinary interface implementation +func (m *ProtobufAny) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *ProtobufAny) UnmarshalBinary(b []byte) error { + var res ProtobufAny + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/v2beta1/go_http_client/artifact_model/protobuf_null_value.go b/backend/api/v2beta1/go_http_client/artifact_model/protobuf_null_value.go new file mode 100644 index 00000000000..39a2211bd72 --- /dev/null +++ b/backend/api/v2beta1/go_http_client/artifact_model/protobuf_null_value.go @@ -0,0 +1,80 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package artifact_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "encoding/json" + + "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/validate" +) + +// ProtobufNullValue `NullValue` is a singleton enumeration to represent the null value for the +// `Value` type union. 
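
The two-stage UnmarshalJSON above splits @type from everything else; the remaining fields land in the ProtobufAny map, and MarshalJSON stitches them back together. A round-trip sketch, where the google.rpc.ErrorInfo payload is just an illustrative value:

package main

import (
	"encoding/json"
	"fmt"
	"log"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/artifact_model"
)

func main() {
	raw := []byte(`{"@type":"type.googleapis.com/google.rpc.ErrorInfo","reason":"QUOTA_EXCEEDED"}`)

	var a artifact_model.ProtobufAny
	if err := json.Unmarshal(raw, &a); err != nil {
		log.Fatal(err)
	}
	fmt.Println(a.AtType)                // type.googleapis.com/google.rpc.ErrorInfo
	fmt.Println(a.ProtobufAny["reason"]) // QUOTA_EXCEEDED

	// MarshalJSON concatenates the known @type with the extra fields again.
	out, err := json.Marshal(a)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(out))
}
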
+// +// The JSON representation for `NullValue` is JSON `null`. +// +// - NULL_VALUE: Null value. +// +// swagger:model protobufNullValue +type ProtobufNullValue string + +func NewProtobufNullValue(value ProtobufNullValue) *ProtobufNullValue { + return &value +} + +// Pointer returns a pointer to a freshly-allocated ProtobufNullValue. +func (m ProtobufNullValue) Pointer() *ProtobufNullValue { + return &m +} + +const ( + + // ProtobufNullValueNULLVALUE captures enum value "NULL_VALUE" + ProtobufNullValueNULLVALUE ProtobufNullValue = "NULL_VALUE" +) + +// for schema +var protobufNullValueEnum []interface{} + +func init() { + var res []ProtobufNullValue + if err := json.Unmarshal([]byte(`["NULL_VALUE"]`), &res); err != nil { + panic(err) + } + for _, v := range res { + protobufNullValueEnum = append(protobufNullValueEnum, v) + } +} + +func (m ProtobufNullValue) validateProtobufNullValueEnum(path, location string, value ProtobufNullValue) error { + if err := validate.EnumCase(path, location, value, protobufNullValueEnum, true); err != nil { + return err + } + return nil +} + +// Validate validates this protobuf null value +func (m ProtobufNullValue) Validate(formats strfmt.Registry) error { + var res []error + + // value enum + if err := m.validateProtobufNullValueEnum("", "body", m); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +// ContextValidate validates this protobuf null value based on context it is used +func (m ProtobufNullValue) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} diff --git a/backend/api/v2beta1/go_http_client/artifact_model/v2beta1_artifact.go b/backend/api/v2beta1/go_http_client/artifact_model/v2beta1_artifact.go new file mode 100644 index 00000000000..07e3f3cfecd --- /dev/null +++ b/backend/api/v2beta1/go_http_client/artifact_model/v2beta1_artifact.go @@ -0,0 +1,186 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package artifact_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + + "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" + "github.com/go-openapi/validate" +) + +// V2beta1Artifact Not to be confused with RuntimeArtifact in PipelineSpec +// +// swagger:model v2beta1Artifact +type V2beta1Artifact struct { + + // Output only. The unique server generated id of the artifact. + // Note: Updated id name to be consistent with other api naming patterns (with prefix) + // Read Only: true + ArtifactID string `json:"artifact_id,omitempty"` + + // Output only. Create time of the artifact in millisecond since epoch. + // Note: The type and name is updated from mlmd artifact to be consistent with other backend apis. + // Read Only: true + // Format: date-time + CreatedAt strfmt.DateTime `json:"created_at,omitempty"` + + // description + Description string `json:"description,omitempty"` + + // Optional. User provided custom properties which are not defined by its type. + Metadata map[string]interface{} `json:"metadata,omitempty"` + + // Required. The client provided name of the artifact. + // Note: in MLMD when name was set, it had to be unique for that type_id + // this restriction is removed here + // If this is a "Metric" artifact, the name of the metric + // is treated as the Key in its K/V pair. 
+ Name string `json:"name,omitempty"` + + // namespace + Namespace string `json:"namespace,omitempty"` + + // Used primarily for metrics + NumberValue float64 `json:"number_value,omitempty"` + + // Required. The name of an ArtifactType. E.g. Dataset + Type *ArtifactArtifactType `json:"type,omitempty"` + + // The uniform resource identifier of the physical artifact. + // May be empty if there is no physical artifact. + URI string `json:"uri,omitempty"` +} + +// Validate validates this v2beta1 artifact +func (m *V2beta1Artifact) Validate(formats strfmt.Registry) error { + var res []error + + if err := m.validateCreatedAt(formats); err != nil { + res = append(res, err) + } + + if err := m.validateType(formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *V2beta1Artifact) validateCreatedAt(formats strfmt.Registry) error { + if swag.IsZero(m.CreatedAt) { // not required + return nil + } + + if err := validate.FormatOf("created_at", "body", "date-time", m.CreatedAt.String(), formats); err != nil { + return err + } + + return nil +} + +func (m *V2beta1Artifact) validateType(formats strfmt.Registry) error { + if swag.IsZero(m.Type) { // not required + return nil + } + + if m.Type != nil { + if err := m.Type.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("type") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("type") + } + return err + } + } + + return nil +} + +// ContextValidate validate this v2beta1 artifact based on the context it is used +func (m *V2beta1Artifact) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateArtifactID(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateCreatedAt(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateType(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} + +func (m *V2beta1Artifact) contextValidateArtifactID(ctx context.Context, formats strfmt.Registry) error { + + if err := validate.ReadOnly(ctx, "artifact_id", "body", string(m.ArtifactID)); err != nil { + return err + } + + return nil +} + +func (m *V2beta1Artifact) contextValidateCreatedAt(ctx context.Context, formats strfmt.Registry) error { + + if err := validate.ReadOnly(ctx, "created_at", "body", strfmt.DateTime(m.CreatedAt)); err != nil { + return err + } + + return nil +} + +func (m *V2beta1Artifact) contextValidateType(ctx context.Context, formats strfmt.Registry) error { + + if m.Type != nil { + + if swag.IsZero(m.Type) { // not required + return nil + } + + if err := m.Type.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("type") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("type") + } + return err + } + } + + return nil +} + +// MarshalBinary interface implementation +func (m *V2beta1Artifact) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *V2beta1Artifact) UnmarshalBinary(b []byte) error { + var res V2beta1Artifact + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/v2beta1/go_http_client/artifact_model/v2beta1_artifact_task.go b/backend/api/v2beta1/go_http_client/artifact_model/v2beta1_artifact_task.go new file mode 100644 index 00000000000..7481f56a380 --- /dev/null +++ b/backend/api/v2beta1/go_http_client/artifact_model/v2beta1_artifact_task.go @@ -0,0 +1,190 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package artifact_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + + "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" + "github.com/go-openapi/validate" +) + +// V2beta1ArtifactTask Describes a relationship link between Artifacts and Tasks +// +// swagger:model v2beta1ArtifactTask +type V2beta1ArtifactTask struct { + + // artifact id + ArtifactID string `json:"artifact_id,omitempty"` + + // Output only. The unique server generated id of the ArtifactTask. + // Read Only: true + ID string `json:"id,omitempty"` + + // key + Key string `json:"key,omitempty"` + + // producer + Producer *V2beta1IOProducer `json:"producer,omitempty"` + + // run id + RunID string `json:"run_id,omitempty"` + + // task id + TaskID string `json:"task_id,omitempty"` + + // type + Type *V2beta1IOType `json:"type,omitempty"` +} + +// Validate validates this v2beta1 artifact task +func (m *V2beta1ArtifactTask) Validate(formats strfmt.Registry) error { + var res []error + + if err := m.validateProducer(formats); err != nil { + res = append(res, err) + } + + if err := m.validateType(formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} + +func (m *V2beta1ArtifactTask) validateProducer(formats strfmt.Registry) error { + if swag.IsZero(m.Producer) { // not required + return nil + } + + if m.Producer != nil { + if err := m.Producer.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("producer") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("producer") + } + return err + } + } + + return nil +} + +func (m *V2beta1ArtifactTask) validateType(formats strfmt.Registry) error { + if swag.IsZero(m.Type) { // not required + return nil + } + + if m.Type != nil { + if err := m.Type.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("type") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("type") + } + return err + } + } + + return nil +} + +// ContextValidate validate this v2beta1 artifact task based on the context it is used +func (m *V2beta1ArtifactTask) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateID(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateProducer(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateType(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *V2beta1ArtifactTask) contextValidateID(ctx context.Context, formats strfmt.Registry) error { + + if err := validate.ReadOnly(ctx, "id", "body", string(m.ID)); err != nil { + return err + } + + return nil +} + +func (m *V2beta1ArtifactTask) contextValidateProducer(ctx context.Context, formats strfmt.Registry) error { + + if m.Producer != nil { + + if swag.IsZero(m.Producer) { // not required + return nil + } + + if err := m.Producer.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("producer") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("producer") + } + return err + } + } + + return nil +} + +func (m *V2beta1ArtifactTask) contextValidateType(ctx context.Context, formats strfmt.Registry) error { + + if m.Type != nil { + + if swag.IsZero(m.Type) { // not required + return nil + } + + if err := m.Type.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("type") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("type") + } + return err + } + } + + return nil +} + +// MarshalBinary interface implementation +func (m *V2beta1ArtifactTask) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *V2beta1ArtifactTask) UnmarshalBinary(b []byte) error { + var res V2beta1ArtifactTask + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/v2beta1/go_http_client/artifact_model/v2beta1_create_artifact_request.go b/backend/api/v2beta1/go_http_client/artifact_model/v2beta1_create_artifact_request.go new file mode 100644 index 00000000000..9a2db20739e --- /dev/null +++ b/backend/api/v2beta1/go_http_client/artifact_model/v2beta1_create_artifact_request.go @@ -0,0 +1,179 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package artifact_model + +// This file was generated by the swagger tool. 
+// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + + "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" +) + +// V2beta1CreateArtifactRequest v2beta1 create artifact request +// +// swagger:model v2beta1CreateArtifactRequest +type V2beta1CreateArtifactRequest struct { + + // Required. The artifact to create. + Artifact *V2beta1Artifact `json:"artifact,omitempty"` + + // If the producing task is in a parallelFor iteration + // this field designates the iteration index + IterationIndex string `json:"iteration_index,omitempty"` + + // The outgoing parameter name of this Artifact within this task's component spec. + // For example: + // def preprocess(my_output: dsl.Outputs[dsl.Artifact]): + // ... + // here the producer_key == "my_output" + // Note that producer_task_name == task_name + ProducerKey string `json:"producer_key,omitempty"` + + // An artifact is always created in the context of a + // run. + RunID string `json:"run_id,omitempty"` + + // The Task that is associated with the creation of this artifact. + TaskID string `json:"task_id,omitempty"` + + // type + Type *V2beta1IOType `json:"type,omitempty"` +} + +// Validate validates this v2beta1 create artifact request +func (m *V2beta1CreateArtifactRequest) Validate(formats strfmt.Registry) error { + var res []error + + if err := m.validateArtifact(formats); err != nil { + res = append(res, err) + } + + if err := m.validateType(formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *V2beta1CreateArtifactRequest) validateArtifact(formats strfmt.Registry) error { + if swag.IsZero(m.Artifact) { // not required + return nil + } + + if m.Artifact != nil { + if err := m.Artifact.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("artifact") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("artifact") + } + return err + } + } + + return nil +} + +func (m *V2beta1CreateArtifactRequest) validateType(formats strfmt.Registry) error { + if swag.IsZero(m.Type) { // not required + return nil + } + + if m.Type != nil { + if err := m.Type.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("type") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("type") + } + return err + } + } + + return nil +} + +// ContextValidate validate this v2beta1 create artifact request based on the context it is used +func (m *V2beta1CreateArtifactRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateArtifact(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateType(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} + +func (m *V2beta1CreateArtifactRequest) contextValidateArtifact(ctx context.Context, formats strfmt.Registry) error { + + if m.Artifact != nil { + + if swag.IsZero(m.Artifact) { // not required + return nil + } + + if err := m.Artifact.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("artifact") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("artifact") + } + return err + } + } + + return nil +} + +func (m *V2beta1CreateArtifactRequest) contextValidateType(ctx context.Context, formats strfmt.Registry) error { + + if m.Type != nil { + + if swag.IsZero(m.Type) { // not required + return nil + } + + if err := m.Type.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("type") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("type") + } + return err + } + } + + return nil +} + +// MarshalBinary interface implementation +func (m *V2beta1CreateArtifactRequest) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *V2beta1CreateArtifactRequest) UnmarshalBinary(b []byte) error { + var res V2beta1CreateArtifactRequest + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/v2beta1/go_http_client/artifact_model/v2beta1_create_artifact_task_request.go b/backend/api/v2beta1/go_http_client/artifact_model/v2beta1_create_artifact_task_request.go new file mode 100644 index 00000000000..748267efcd1 --- /dev/null +++ b/backend/api/v2beta1/go_http_client/artifact_model/v2beta1_create_artifact_task_request.go @@ -0,0 +1,109 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package artifact_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + + "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" +) + +// V2beta1CreateArtifactTaskRequest Request to create an artifact-task relationship +// +// swagger:model v2beta1CreateArtifactTaskRequest +type V2beta1CreateArtifactTaskRequest struct { + + // Required. The artifact-task relationship to create. + ArtifactTask *V2beta1ArtifactTask `json:"artifact_task,omitempty"` +} + +// Validate validates this v2beta1 create artifact task request +func (m *V2beta1CreateArtifactTaskRequest) Validate(formats strfmt.Registry) error { + var res []error + + if err := m.validateArtifactTask(formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} + +func (m *V2beta1CreateArtifactTaskRequest) validateArtifactTask(formats strfmt.Registry) error { + if swag.IsZero(m.ArtifactTask) { // not required + return nil + } + + if m.ArtifactTask != nil { + if err := m.ArtifactTask.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("artifact_task") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("artifact_task") + } + return err + } + } + + return nil +} + +// ContextValidate validate this v2beta1 create artifact task request based on the context it is used +func (m *V2beta1CreateArtifactTaskRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateArtifactTask(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *V2beta1CreateArtifactTaskRequest) contextValidateArtifactTask(ctx context.Context, formats strfmt.Registry) error { + + if m.ArtifactTask != nil { + + if swag.IsZero(m.ArtifactTask) { // not required + return nil + } + + if err := m.ArtifactTask.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("artifact_task") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("artifact_task") + } + return err + } + } + + return nil +} + +// MarshalBinary interface implementation +func (m *V2beta1CreateArtifactTaskRequest) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *V2beta1CreateArtifactTaskRequest) UnmarshalBinary(b []byte) error { + var res V2beta1CreateArtifactTaskRequest + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/v2beta1/go_http_client/artifact_model/v2beta1_create_artifact_tasks_bulk_request.go b/backend/api/v2beta1/go_http_client/artifact_model/v2beta1_create_artifact_tasks_bulk_request.go new file mode 100644 index 00000000000..8a44c489b5a --- /dev/null +++ b/backend/api/v2beta1/go_http_client/artifact_model/v2beta1_create_artifact_tasks_bulk_request.go @@ -0,0 +1,121 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package artifact_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "strconv" + + "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" +) + +// V2beta1CreateArtifactTasksBulkRequest v2beta1 create artifact tasks bulk request +// +// swagger:model v2beta1CreateArtifactTasksBulkRequest +type V2beta1CreateArtifactTasksBulkRequest struct { + + // Required. The list of artifact-task relationships to create. + ArtifactTasks []*V2beta1ArtifactTask `json:"artifact_tasks"` +} + +// Validate validates this v2beta1 create artifact tasks bulk request +func (m *V2beta1CreateArtifactTasksBulkRequest) Validate(formats strfmt.Registry) error { + var res []error + + if err := m.validateArtifactTasks(formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} + +func (m *V2beta1CreateArtifactTasksBulkRequest) validateArtifactTasks(formats strfmt.Registry) error { + if swag.IsZero(m.ArtifactTasks) { // not required + return nil + } + + for i := 0; i < len(m.ArtifactTasks); i++ { + if swag.IsZero(m.ArtifactTasks[i]) { // not required + continue + } + + if m.ArtifactTasks[i] != nil { + if err := m.ArtifactTasks[i].Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("artifact_tasks" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("artifact_tasks" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this v2beta1 create artifact tasks bulk request based on the context it is used +func (m *V2beta1CreateArtifactTasksBulkRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateArtifactTasks(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *V2beta1CreateArtifactTasksBulkRequest) contextValidateArtifactTasks(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.ArtifactTasks); i++ { + + if m.ArtifactTasks[i] != nil { + + if swag.IsZero(m.ArtifactTasks[i]) { // not required + return nil + } + + if err := m.ArtifactTasks[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("artifact_tasks" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("artifact_tasks" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// MarshalBinary interface implementation +func (m *V2beta1CreateArtifactTasksBulkRequest) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *V2beta1CreateArtifactTasksBulkRequest) UnmarshalBinary(b []byte) error { + var res V2beta1CreateArtifactTasksBulkRequest + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/v2beta1/go_http_client/artifact_model/v2beta1_create_artifact_tasks_bulk_response.go b/backend/api/v2beta1/go_http_client/artifact_model/v2beta1_create_artifact_tasks_bulk_response.go new file mode 100644 index 00000000000..27eaeb77d3c --- /dev/null +++ b/backend/api/v2beta1/go_http_client/artifact_model/v2beta1_create_artifact_tasks_bulk_response.go @@ -0,0 +1,121 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package artifact_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "strconv" + + "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" +) + +// V2beta1CreateArtifactTasksBulkResponse v2beta1 create artifact tasks bulk response +// +// swagger:model v2beta1CreateArtifactTasksBulkResponse +type V2beta1CreateArtifactTasksBulkResponse struct { + + // The list of created artifact-task relationships. 
+ ArtifactTasks []*V2beta1ArtifactTask `json:"artifact_tasks"` +} + +// Validate validates this v2beta1 create artifact tasks bulk response +func (m *V2beta1CreateArtifactTasksBulkResponse) Validate(formats strfmt.Registry) error { + var res []error + + if err := m.validateArtifactTasks(formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *V2beta1CreateArtifactTasksBulkResponse) validateArtifactTasks(formats strfmt.Registry) error { + if swag.IsZero(m.ArtifactTasks) { // not required + return nil + } + + for i := 0; i < len(m.ArtifactTasks); i++ { + if swag.IsZero(m.ArtifactTasks[i]) { // not required + continue + } + + if m.ArtifactTasks[i] != nil { + if err := m.ArtifactTasks[i].Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("artifact_tasks" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("artifact_tasks" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this v2beta1 create artifact tasks bulk response based on the context it is used +func (m *V2beta1CreateArtifactTasksBulkResponse) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateArtifactTasks(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *V2beta1CreateArtifactTasksBulkResponse) contextValidateArtifactTasks(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.ArtifactTasks); i++ { + + if m.ArtifactTasks[i] != nil { + + if swag.IsZero(m.ArtifactTasks[i]) { // not required + return nil + } + + if err := m.ArtifactTasks[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("artifact_tasks" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("artifact_tasks" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// MarshalBinary interface implementation +func (m *V2beta1CreateArtifactTasksBulkResponse) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *V2beta1CreateArtifactTasksBulkResponse) UnmarshalBinary(b []byte) error { + var res V2beta1CreateArtifactTasksBulkResponse + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/v2beta1/go_http_client/artifact_model/v2beta1_create_artifacts_bulk_request.go b/backend/api/v2beta1/go_http_client/artifact_model/v2beta1_create_artifacts_bulk_request.go new file mode 100644 index 00000000000..c54bb212592 --- /dev/null +++ b/backend/api/v2beta1/go_http_client/artifact_model/v2beta1_create_artifacts_bulk_request.go @@ -0,0 +1,121 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package artifact_model + +// This file was generated by the swagger tool. 
+// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "strconv" + + "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" +) + +// V2beta1CreateArtifactsBulkRequest v2beta1 create artifacts bulk request +// +// swagger:model v2beta1CreateArtifactsBulkRequest +type V2beta1CreateArtifactsBulkRequest struct { + + // Required. The list of artifacts to create. + Artifacts []*V2beta1CreateArtifactRequest `json:"artifacts"` +} + +// Validate validates this v2beta1 create artifacts bulk request +func (m *V2beta1CreateArtifactsBulkRequest) Validate(formats strfmt.Registry) error { + var res []error + + if err := m.validateArtifacts(formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *V2beta1CreateArtifactsBulkRequest) validateArtifacts(formats strfmt.Registry) error { + if swag.IsZero(m.Artifacts) { // not required + return nil + } + + for i := 0; i < len(m.Artifacts); i++ { + if swag.IsZero(m.Artifacts[i]) { // not required + continue + } + + if m.Artifacts[i] != nil { + if err := m.Artifacts[i].Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("artifacts" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("artifacts" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this v2beta1 create artifacts bulk request based on the context it is used +func (m *V2beta1CreateArtifactsBulkRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateArtifacts(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *V2beta1CreateArtifactsBulkRequest) contextValidateArtifacts(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.Artifacts); i++ { + + if m.Artifacts[i] != nil { + + if swag.IsZero(m.Artifacts[i]) { // not required + return nil + } + + if err := m.Artifacts[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("artifacts" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("artifacts" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// MarshalBinary interface implementation +func (m *V2beta1CreateArtifactsBulkRequest) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *V2beta1CreateArtifactsBulkRequest) UnmarshalBinary(b []byte) error { + var res V2beta1CreateArtifactsBulkRequest + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/v2beta1/go_http_client/artifact_model/v2beta1_create_artifacts_bulk_response.go b/backend/api/v2beta1/go_http_client/artifact_model/v2beta1_create_artifacts_bulk_response.go new file mode 100644 index 00000000000..b761e6c63de --- /dev/null +++ b/backend/api/v2beta1/go_http_client/artifact_model/v2beta1_create_artifacts_bulk_response.go @@ -0,0 +1,121 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package artifact_model + +// This file was generated by the swagger tool. 
+// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "strconv" + + "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" +) + +// V2beta1CreateArtifactsBulkResponse v2beta1 create artifacts bulk response +// +// swagger:model v2beta1CreateArtifactsBulkResponse +type V2beta1CreateArtifactsBulkResponse struct { + + // The list of created artifacts. + Artifacts []*V2beta1Artifact `json:"artifacts"` +} + +// Validate validates this v2beta1 create artifacts bulk response +func (m *V2beta1CreateArtifactsBulkResponse) Validate(formats strfmt.Registry) error { + var res []error + + if err := m.validateArtifacts(formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *V2beta1CreateArtifactsBulkResponse) validateArtifacts(formats strfmt.Registry) error { + if swag.IsZero(m.Artifacts) { // not required + return nil + } + + for i := 0; i < len(m.Artifacts); i++ { + if swag.IsZero(m.Artifacts[i]) { // not required + continue + } + + if m.Artifacts[i] != nil { + if err := m.Artifacts[i].Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("artifacts" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("artifacts" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this v2beta1 create artifacts bulk response based on the context it is used +func (m *V2beta1CreateArtifactsBulkResponse) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateArtifacts(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *V2beta1CreateArtifactsBulkResponse) contextValidateArtifacts(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.Artifacts); i++ { + + if m.Artifacts[i] != nil { + + if swag.IsZero(m.Artifacts[i]) { // not required + return nil + } + + if err := m.Artifacts[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("artifacts" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("artifacts" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// MarshalBinary interface implementation +func (m *V2beta1CreateArtifactsBulkResponse) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *V2beta1CreateArtifactsBulkResponse) UnmarshalBinary(b []byte) error { + var res V2beta1CreateArtifactsBulkResponse + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/v2beta1/go_http_client/artifact_model/v2beta1_i_o_producer.go b/backend/api/v2beta1/go_http_client/artifact_model/v2beta1_i_o_producer.go new file mode 100644 index 00000000000..5a2e5b736f3 --- /dev/null +++ b/backend/api/v2beta1/go_http_client/artifact_model/v2beta1_i_o_producer.go @@ -0,0 +1,54 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package artifact_model + +// This file was generated by the swagger tool. 
+// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" +) + +// V2beta1IOProducer v2beta1 i o producer +// +// swagger:model v2beta1IOProducer +type V2beta1IOProducer struct { + + // When a source is from an iteration Runtime + // task type inside a ParallelFor + Iteration string `json:"iteration,omitempty"` + + // task name + TaskName string `json:"task_name,omitempty"` +} + +// Validate validates this v2beta1 i o producer +func (m *V2beta1IOProducer) Validate(formats strfmt.Registry) error { + return nil +} + +// ContextValidate validates this v2beta1 i o producer based on context it is used +func (m *V2beta1IOProducer) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} + +// MarshalBinary interface implementation +func (m *V2beta1IOProducer) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *V2beta1IOProducer) UnmarshalBinary(b []byte) error { + var res V2beta1IOProducer + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/v2beta1/go_http_client/artifact_model/v2beta1_i_o_type.go b/backend/api/v2beta1/go_http_client/artifact_model/v2beta1_i_o_type.go new file mode 100644 index 00000000000..c86cabb6695 --- /dev/null +++ b/backend/api/v2beta1/go_http_client/artifact_model/v2beta1_i_o_type.go @@ -0,0 +1,151 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package artifact_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "encoding/json" + + "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/validate" +) + +// V2beta1IOType Describes the I/O relationship between +// Artifacts/Parameters and Tasks. +// There are a couple of instances where +// input/outputs have special types such +// as in the case of LoopArguments or +// dsl.Collected outputs. +// +// - UNSPECIFIED: For validation +// - COMPONENT_DEFAULT_INPUT: This is used for inputs that are +// +// provided via default parameters in +// the component input definitions +// - TASK_OUTPUT_INPUT: This is used for inputs that are +// +// provided via upstream tasks. +// In the sdk this appears as: +// TaskInputsSpec.kind.task_output_parameter +// & TaskInputsSpec.kind.task_output_artifact +// - COMPONENT_INPUT: Used for inputs that are +// +// passed from parent tasks. +// - RUNTIME_VALUE_INPUT: Hardcoded values passed +// +// as arguments to the task. +// - COLLECTED_INPUTS: Used for dsl.Collected +// +// Usage of this type indicates that all +// Artifacts within the IOArtifact.artifacts +// are inputs collected from sub tasks with +// ITERATOR_OUTPUT outputs. +// - ITERATOR_INPUT: In a for loop task, introduced via ParallelFor, this type +// +// is used to indicate whether this resolved input belongs +// to a parameterIterator or artifactIterator. +// In such a case the "artifacts" field for IOArtifact.artifacts +// is the list of resolved items for this parallelFor. +// - ITERATOR_INPUT_RAW: Hardcoded iterator parameters. 
+//
+// Raw Iterator inputs have no producer
+// - ITERATOR_OUTPUT: When an output is produced by a Runtime Iteration Task
+//
+// This value is used to differentiate between standard inputs
+// - OUTPUT: All other output types fall under this type.
+// - ONE_OF_OUTPUT: An output of a Conditions branch.
+//
+// swagger:model v2beta1IOType
+type V2beta1IOType string
+
+func NewV2beta1IOType(value V2beta1IOType) *V2beta1IOType {
+	return &value
+}
+
+// Pointer returns a pointer to a freshly-allocated V2beta1IOType.
+func (m V2beta1IOType) Pointer() *V2beta1IOType {
+	return &m
+}
+
+const (
+
+	// V2beta1IOTypeUNSPECIFIED captures enum value "UNSPECIFIED"
+	V2beta1IOTypeUNSPECIFIED V2beta1IOType = "UNSPECIFIED"
+
+	// V2beta1IOTypeCOMPONENTDEFAULTINPUT captures enum value "COMPONENT_DEFAULT_INPUT"
+	V2beta1IOTypeCOMPONENTDEFAULTINPUT V2beta1IOType = "COMPONENT_DEFAULT_INPUT"
+
+	// V2beta1IOTypeTASKOUTPUTINPUT captures enum value "TASK_OUTPUT_INPUT"
+	V2beta1IOTypeTASKOUTPUTINPUT V2beta1IOType = "TASK_OUTPUT_INPUT"
+
+	// V2beta1IOTypeCOMPONENTINPUT captures enum value "COMPONENT_INPUT"
+	V2beta1IOTypeCOMPONENTINPUT V2beta1IOType = "COMPONENT_INPUT"
+
+	// V2beta1IOTypeRUNTIMEVALUEINPUT captures enum value "RUNTIME_VALUE_INPUT"
+	V2beta1IOTypeRUNTIMEVALUEINPUT V2beta1IOType = "RUNTIME_VALUE_INPUT"
+
+	// V2beta1IOTypeCOLLECTEDINPUTS captures enum value "COLLECTED_INPUTS"
+	V2beta1IOTypeCOLLECTEDINPUTS V2beta1IOType = "COLLECTED_INPUTS"
+
+	// V2beta1IOTypeITERATORINPUT captures enum value "ITERATOR_INPUT"
+	V2beta1IOTypeITERATORINPUT V2beta1IOType = "ITERATOR_INPUT"
+
+	// V2beta1IOTypeITERATORINPUTRAW captures enum value "ITERATOR_INPUT_RAW"
+	V2beta1IOTypeITERATORINPUTRAW V2beta1IOType = "ITERATOR_INPUT_RAW"
+
+	// V2beta1IOTypeITERATOROUTPUT captures enum value "ITERATOR_OUTPUT"
+	V2beta1IOTypeITERATOROUTPUT V2beta1IOType = "ITERATOR_OUTPUT"
+
+	// V2beta1IOTypeOUTPUT captures enum value "OUTPUT"
+	V2beta1IOTypeOUTPUT V2beta1IOType = "OUTPUT"
+
+	// V2beta1IOTypeONEOFOUTPUT captures enum value "ONE_OF_OUTPUT"
+	V2beta1IOTypeONEOFOUTPUT V2beta1IOType = "ONE_OF_OUTPUT"
+
+	// V2beta1IOTypeTASKFINALSTATUSOUTPUT captures enum value "TASK_FINAL_STATUS_OUTPUT"
+	V2beta1IOTypeTASKFINALSTATUSOUTPUT V2beta1IOType = "TASK_FINAL_STATUS_OUTPUT"
+)
+
+// for schema
+var v2beta1IOTypeEnum []interface{}
+
+func init() {
+	var res []V2beta1IOType
+	if err := json.Unmarshal([]byte(`["UNSPECIFIED","COMPONENT_DEFAULT_INPUT","TASK_OUTPUT_INPUT","COMPONENT_INPUT","RUNTIME_VALUE_INPUT","COLLECTED_INPUTS","ITERATOR_INPUT","ITERATOR_INPUT_RAW","ITERATOR_OUTPUT","OUTPUT","ONE_OF_OUTPUT","TASK_FINAL_STATUS_OUTPUT"]`), &res); err != nil {
+		panic(err)
+	}
+	for _, v := range res {
+		v2beta1IOTypeEnum = append(v2beta1IOTypeEnum, v)
+	}
+}
+
+func (m V2beta1IOType) validateV2beta1IOTypeEnum(path, location string, value V2beta1IOType) error {
+	if err := validate.EnumCase(path, location, value, v2beta1IOTypeEnum, true); err != nil {
+		return err
+	}
+	return nil
+}
+
+// Validate validates this v2beta1 i o type
+func (m V2beta1IOType) Validate(formats strfmt.Registry) error {
+	var res []error
+
+	// value enum
+	if err := m.validateV2beta1IOTypeEnum("", "body", m); err != nil {
+		return err
+	}
+
+	if len(res) > 0 {
+		return errors.CompositeValidationError(res...)
+ } + return nil +} + +// ContextValidate validates this v2beta1 i o type based on context it is used +func (m V2beta1IOType) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} diff --git a/backend/api/v2beta1/go_http_client/artifact_model/v2beta1_list_artifact_response.go b/backend/api/v2beta1/go_http_client/artifact_model/v2beta1_list_artifact_response.go new file mode 100644 index 00000000000..5214edea4d7 --- /dev/null +++ b/backend/api/v2beta1/go_http_client/artifact_model/v2beta1_list_artifact_response.go @@ -0,0 +1,128 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package artifact_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "strconv" + + "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" +) + +// V2beta1ListArtifactResponse v2beta1 list artifact response +// +// swagger:model v2beta1ListArtifactResponse +type V2beta1ListArtifactResponse struct { + + // The list of artifacts returned. + Artifacts []*V2beta1Artifact `json:"artifacts"` + + // A token to retrieve the next page of results, or empty if there are no + // more results in the list. + NextPageToken string `json:"next_page_token,omitempty"` + + // The total number of artifacts available. This field is not always populated. + TotalSize int32 `json:"total_size,omitempty"` +} + +// Validate validates this v2beta1 list artifact response +func (m *V2beta1ListArtifactResponse) Validate(formats strfmt.Registry) error { + var res []error + + if err := m.validateArtifacts(formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *V2beta1ListArtifactResponse) validateArtifacts(formats strfmt.Registry) error { + if swag.IsZero(m.Artifacts) { // not required + return nil + } + + for i := 0; i < len(m.Artifacts); i++ { + if swag.IsZero(m.Artifacts[i]) { // not required + continue + } + + if m.Artifacts[i] != nil { + if err := m.Artifacts[i].Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("artifacts" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("artifacts" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this v2beta1 list artifact response based on the context it is used +func (m *V2beta1ListArtifactResponse) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateArtifacts(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *V2beta1ListArtifactResponse) contextValidateArtifacts(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.Artifacts); i++ { + + if m.Artifacts[i] != nil { + + if swag.IsZero(m.Artifacts[i]) { // not required + return nil + } + + if err := m.Artifacts[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("artifacts" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("artifacts" + "." 
+ strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// MarshalBinary interface implementation +func (m *V2beta1ListArtifactResponse) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *V2beta1ListArtifactResponse) UnmarshalBinary(b []byte) error { + var res V2beta1ListArtifactResponse + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/v2beta1/go_http_client/artifact_model/v2beta1_list_artifact_tasks_response.go b/backend/api/v2beta1/go_http_client/artifact_model/v2beta1_list_artifact_tasks_response.go new file mode 100644 index 00000000000..4770519e327 --- /dev/null +++ b/backend/api/v2beta1/go_http_client/artifact_model/v2beta1_list_artifact_tasks_response.go @@ -0,0 +1,127 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package artifact_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "strconv" + + "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" +) + +// V2beta1ListArtifactTasksResponse v2beta1 list artifact tasks response +// +// swagger:model v2beta1ListArtifactTasksResponse +type V2beta1ListArtifactTasksResponse struct { + + // artifact tasks + ArtifactTasks []*V2beta1ArtifactTask `json:"artifact_tasks"` + + // next page token + NextPageToken string `json:"next_page_token,omitempty"` + + // total size + TotalSize int32 `json:"total_size,omitempty"` +} + +// Validate validates this v2beta1 list artifact tasks response +func (m *V2beta1ListArtifactTasksResponse) Validate(formats strfmt.Registry) error { + var res []error + + if err := m.validateArtifactTasks(formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *V2beta1ListArtifactTasksResponse) validateArtifactTasks(formats strfmt.Registry) error { + if swag.IsZero(m.ArtifactTasks) { // not required + return nil + } + + for i := 0; i < len(m.ArtifactTasks); i++ { + if swag.IsZero(m.ArtifactTasks[i]) { // not required + continue + } + + if m.ArtifactTasks[i] != nil { + if err := m.ArtifactTasks[i].Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("artifact_tasks" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("artifact_tasks" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this v2beta1 list artifact tasks response based on the context it is used +func (m *V2beta1ListArtifactTasksResponse) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateArtifactTasks(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *V2beta1ListArtifactTasksResponse) contextValidateArtifactTasks(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.ArtifactTasks); i++ { + + if m.ArtifactTasks[i] != nil { + + if swag.IsZero(m.ArtifactTasks[i]) { // not required + return nil + } + + if err := m.ArtifactTasks[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("artifact_tasks" + "." 
+ strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("artifact_tasks" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// MarshalBinary interface implementation +func (m *V2beta1ListArtifactTasksResponse) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *V2beta1ListArtifactTasksResponse) UnmarshalBinary(b []byte) error { + var res V2beta1ListArtifactTasksResponse + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/v2beta1/go_http_client/run_client/run_service/batch_update_tasks_parameters.go b/backend/api/v2beta1/go_http_client/run_client/run_service/batch_update_tasks_parameters.go new file mode 100644 index 00000000000..8be81103ba5 --- /dev/null +++ b/backend/api/v2beta1/go_http_client/run_client/run_service/batch_update_tasks_parameters.go @@ -0,0 +1,150 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package run_service + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "net/http" + "time" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" + + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model" +) + +// NewBatchUpdateTasksParams creates a new BatchUpdateTasksParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. +func NewBatchUpdateTasksParams() *BatchUpdateTasksParams { + return &BatchUpdateTasksParams{ + timeout: cr.DefaultTimeout, + } +} + +// NewBatchUpdateTasksParamsWithTimeout creates a new BatchUpdateTasksParams object +// with the ability to set a timeout on a request. +func NewBatchUpdateTasksParamsWithTimeout(timeout time.Duration) *BatchUpdateTasksParams { + return &BatchUpdateTasksParams{ + timeout: timeout, + } +} + +// NewBatchUpdateTasksParamsWithContext creates a new BatchUpdateTasksParams object +// with the ability to set a context for a request. +func NewBatchUpdateTasksParamsWithContext(ctx context.Context) *BatchUpdateTasksParams { + return &BatchUpdateTasksParams{ + Context: ctx, + } +} + +// NewBatchUpdateTasksParamsWithHTTPClient creates a new BatchUpdateTasksParams object +// with the ability to set a custom HTTPClient for a request. +func NewBatchUpdateTasksParamsWithHTTPClient(client *http.Client) *BatchUpdateTasksParams { + return &BatchUpdateTasksParams{ + HTTPClient: client, + } +} + +/* +BatchUpdateTasksParams contains all the parameters to send to the API endpoint + + for the batch update tasks operation. + + Typically these are written to a http.Request. +*/ +type BatchUpdateTasksParams struct { + + // Body. + Body *run_model.V2beta1UpdateTasksBulkRequest + + timeout time.Duration + Context context.Context + HTTPClient *http.Client +} + +// WithDefaults hydrates default values in the batch update tasks params (not the query body). +// +// All values with no default are reset to their zero value. 
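+//
+// Illustrative (non-generated) example of the fluent setters on this params
+// object, assuming req is a prepared *run_model.V2beta1UpdateTasksBulkRequest:
+//
+//	params := NewBatchUpdateTasksParams().
+//		WithTimeout(30 * time.Second).
+//		WithBody(req)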
+func (o *BatchUpdateTasksParams) WithDefaults() *BatchUpdateTasksParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the batch update tasks params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *BatchUpdateTasksParams) SetDefaults() { + // no default values defined for this parameter +} + +// WithTimeout adds the timeout to the batch update tasks params +func (o *BatchUpdateTasksParams) WithTimeout(timeout time.Duration) *BatchUpdateTasksParams { + o.SetTimeout(timeout) + return o +} + +// SetTimeout adds the timeout to the batch update tasks params +func (o *BatchUpdateTasksParams) SetTimeout(timeout time.Duration) { + o.timeout = timeout +} + +// WithContext adds the context to the batch update tasks params +func (o *BatchUpdateTasksParams) WithContext(ctx context.Context) *BatchUpdateTasksParams { + o.SetContext(ctx) + return o +} + +// SetContext adds the context to the batch update tasks params +func (o *BatchUpdateTasksParams) SetContext(ctx context.Context) { + o.Context = ctx +} + +// WithHTTPClient adds the HTTPClient to the batch update tasks params +func (o *BatchUpdateTasksParams) WithHTTPClient(client *http.Client) *BatchUpdateTasksParams { + o.SetHTTPClient(client) + return o +} + +// SetHTTPClient adds the HTTPClient to the batch update tasks params +func (o *BatchUpdateTasksParams) SetHTTPClient(client *http.Client) { + o.HTTPClient = client +} + +// WithBody adds the body to the batch update tasks params +func (o *BatchUpdateTasksParams) WithBody(body *run_model.V2beta1UpdateTasksBulkRequest) *BatchUpdateTasksParams { + o.SetBody(body) + return o +} + +// SetBody adds the body to the batch update tasks params +func (o *BatchUpdateTasksParams) SetBody(body *run_model.V2beta1UpdateTasksBulkRequest) { + o.Body = body +} + +// WriteToRequest writes these params to a swagger request +func (o *BatchUpdateTasksParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { + + if err := r.SetTimeout(o.timeout); err != nil { + return err + } + var res []error + if o.Body != nil { + if err := r.SetBodyParam(o.Body); err != nil { + return err + } + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} diff --git a/backend/api/v2beta1/go_http_client/run_client/run_service/batch_update_tasks_responses.go b/backend/api/v2beta1/go_http_client/run_client/run_service/batch_update_tasks_responses.go new file mode 100644 index 00000000000..60cb7d12900 --- /dev/null +++ b/backend/api/v2beta1/go_http_client/run_client/run_service/batch_update_tasks_responses.go @@ -0,0 +1,187 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package run_service + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" + + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model" +) + +// BatchUpdateTasksReader is a Reader for the BatchUpdateTasks structure. +type BatchUpdateTasksReader struct { + formats strfmt.Registry +} + +// ReadResponse reads a server response into the received o. 
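+// A 200 response is decoded into *BatchUpdateTasksOK; any other status code is
+// decoded into *BatchUpdateTasksDefault, which is returned as an error unless
+// the status code is in the 2xx range.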
+func (o *BatchUpdateTasksReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + switch response.Code() { + case 200: + result := NewBatchUpdateTasksOK() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return result, nil + default: + result := NewBatchUpdateTasksDefault(response.Code()) + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + if response.Code()/100 == 2 { + return result, nil + } + return nil, result + } +} + +// NewBatchUpdateTasksOK creates a BatchUpdateTasksOK with default headers values +func NewBatchUpdateTasksOK() *BatchUpdateTasksOK { + return &BatchUpdateTasksOK{} +} + +/* +BatchUpdateTasksOK describes a response with status code 200, with default header values. + +A successful response. +*/ +type BatchUpdateTasksOK struct { + Payload *run_model.V2beta1UpdateTasksBulkResponse +} + +// IsSuccess returns true when this batch update tasks o k response has a 2xx status code +func (o *BatchUpdateTasksOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this batch update tasks o k response has a 3xx status code +func (o *BatchUpdateTasksOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this batch update tasks o k response has a 4xx status code +func (o *BatchUpdateTasksOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this batch update tasks o k response has a 5xx status code +func (o *BatchUpdateTasksOK) IsServerError() bool { + return false +} + +// IsCode returns true when this batch update tasks o k response a status code equal to that given +func (o *BatchUpdateTasksOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the batch update tasks o k response +func (o *BatchUpdateTasksOK) Code() int { + return 200 +} + +func (o *BatchUpdateTasksOK) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/tasks:batchUpdate][%d] batchUpdateTasksOK %s", 200, payload) +} + +func (o *BatchUpdateTasksOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/tasks:batchUpdate][%d] batchUpdateTasksOK %s", 200, payload) +} + +func (o *BatchUpdateTasksOK) GetPayload() *run_model.V2beta1UpdateTasksBulkResponse { + return o.Payload +} + +func (o *BatchUpdateTasksOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + o.Payload = new(run_model.V2beta1UpdateTasksBulkResponse) + + // response payload + if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} + +// NewBatchUpdateTasksDefault creates a BatchUpdateTasksDefault with default headers values +func NewBatchUpdateTasksDefault(code int) *BatchUpdateTasksDefault { + return &BatchUpdateTasksDefault{ + _statusCode: code, + } +} + +/* +BatchUpdateTasksDefault describes a response with status code -1, with default header values. + +An unexpected error response. 
+*/ +type BatchUpdateTasksDefault struct { + _statusCode int + + Payload *run_model.GooglerpcStatus +} + +// IsSuccess returns true when this batch update tasks default response has a 2xx status code +func (o *BatchUpdateTasksDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this batch update tasks default response has a 3xx status code +func (o *BatchUpdateTasksDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this batch update tasks default response has a 4xx status code +func (o *BatchUpdateTasksDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this batch update tasks default response has a 5xx status code +func (o *BatchUpdateTasksDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this batch update tasks default response a status code equal to that given +func (o *BatchUpdateTasksDefault) IsCode(code int) bool { + return o._statusCode == code +} + +// Code gets the status code for the batch update tasks default response +func (o *BatchUpdateTasksDefault) Code() int { + return o._statusCode +} + +func (o *BatchUpdateTasksDefault) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/tasks:batchUpdate][%d] batch_update_tasks default %s", o._statusCode, payload) +} + +func (o *BatchUpdateTasksDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/tasks:batchUpdate][%d] batch_update_tasks default %s", o._statusCode, payload) +} + +func (o *BatchUpdateTasksDefault) GetPayload() *run_model.GooglerpcStatus { + return o.Payload +} + +func (o *BatchUpdateTasksDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + o.Payload = new(run_model.GooglerpcStatus) + + // response payload + if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} diff --git a/backend/api/v2beta1/go_http_client/run_client/run_service/create_task_parameters.go b/backend/api/v2beta1/go_http_client/run_client/run_service/create_task_parameters.go new file mode 100644 index 00000000000..350f3e12b0b --- /dev/null +++ b/backend/api/v2beta1/go_http_client/run_client/run_service/create_task_parameters.go @@ -0,0 +1,150 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package run_service + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "net/http" + "time" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" + + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model" +) + +// NewCreateTaskParams creates a new CreateTaskParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. +func NewCreateTaskParams() *CreateTaskParams { + return &CreateTaskParams{ + timeout: cr.DefaultTimeout, + } +} + +// NewCreateTaskParamsWithTimeout creates a new CreateTaskParams object +// with the ability to set a timeout on a request. 
+func NewCreateTaskParamsWithTimeout(timeout time.Duration) *CreateTaskParams { + return &CreateTaskParams{ + timeout: timeout, + } +} + +// NewCreateTaskParamsWithContext creates a new CreateTaskParams object +// with the ability to set a context for a request. +func NewCreateTaskParamsWithContext(ctx context.Context) *CreateTaskParams { + return &CreateTaskParams{ + Context: ctx, + } +} + +// NewCreateTaskParamsWithHTTPClient creates a new CreateTaskParams object +// with the ability to set a custom HTTPClient for a request. +func NewCreateTaskParamsWithHTTPClient(client *http.Client) *CreateTaskParams { + return &CreateTaskParams{ + HTTPClient: client, + } +} + +/* +CreateTaskParams contains all the parameters to send to the API endpoint + + for the create task operation. + + Typically these are written to a http.Request. +*/ +type CreateTaskParams struct { + + // Task. + Task *run_model.V2beta1PipelineTaskDetail + + timeout time.Duration + Context context.Context + HTTPClient *http.Client +} + +// WithDefaults hydrates default values in the create task params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *CreateTaskParams) WithDefaults() *CreateTaskParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the create task params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *CreateTaskParams) SetDefaults() { + // no default values defined for this parameter +} + +// WithTimeout adds the timeout to the create task params +func (o *CreateTaskParams) WithTimeout(timeout time.Duration) *CreateTaskParams { + o.SetTimeout(timeout) + return o +} + +// SetTimeout adds the timeout to the create task params +func (o *CreateTaskParams) SetTimeout(timeout time.Duration) { + o.timeout = timeout +} + +// WithContext adds the context to the create task params +func (o *CreateTaskParams) WithContext(ctx context.Context) *CreateTaskParams { + o.SetContext(ctx) + return o +} + +// SetContext adds the context to the create task params +func (o *CreateTaskParams) SetContext(ctx context.Context) { + o.Context = ctx +} + +// WithHTTPClient adds the HTTPClient to the create task params +func (o *CreateTaskParams) WithHTTPClient(client *http.Client) *CreateTaskParams { + o.SetHTTPClient(client) + return o +} + +// SetHTTPClient adds the HTTPClient to the create task params +func (o *CreateTaskParams) SetHTTPClient(client *http.Client) { + o.HTTPClient = client +} + +// WithTask adds the task to the create task params +func (o *CreateTaskParams) WithTask(task *run_model.V2beta1PipelineTaskDetail) *CreateTaskParams { + o.SetTask(task) + return o +} + +// SetTask adds the task to the create task params +func (o *CreateTaskParams) SetTask(task *run_model.V2beta1PipelineTaskDetail) { + o.Task = task +} + +// WriteToRequest writes these params to a swagger request +func (o *CreateTaskParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { + + if err := r.SetTimeout(o.timeout); err != nil { + return err + } + var res []error + if o.Task != nil { + if err := r.SetBodyParam(o.Task); err != nil { + return err + } + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} diff --git a/backend/api/v2beta1/go_http_client/run_client/run_service/create_task_responses.go b/backend/api/v2beta1/go_http_client/run_client/run_service/create_task_responses.go new file mode 100644 index 00000000000..d233d277165 --- /dev/null +++ b/backend/api/v2beta1/go_http_client/run_client/run_service/create_task_responses.go @@ -0,0 +1,187 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package run_service + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" + + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model" +) + +// CreateTaskReader is a Reader for the CreateTask structure. +type CreateTaskReader struct { + formats strfmt.Registry +} + +// ReadResponse reads a server response into the received o. +func (o *CreateTaskReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + switch response.Code() { + case 200: + result := NewCreateTaskOK() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return result, nil + default: + result := NewCreateTaskDefault(response.Code()) + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + if response.Code()/100 == 2 { + return result, nil + } + return nil, result + } +} + +// NewCreateTaskOK creates a CreateTaskOK with default headers values +func NewCreateTaskOK() *CreateTaskOK { + return &CreateTaskOK{} +} + +/* +CreateTaskOK describes a response with status code 200, with default header values. + +A successful response. 
+*/ +type CreateTaskOK struct { + Payload *run_model.V2beta1PipelineTaskDetail +} + +// IsSuccess returns true when this create task o k response has a 2xx status code +func (o *CreateTaskOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this create task o k response has a 3xx status code +func (o *CreateTaskOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this create task o k response has a 4xx status code +func (o *CreateTaskOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this create task o k response has a 5xx status code +func (o *CreateTaskOK) IsServerError() bool { + return false +} + +// IsCode returns true when this create task o k response a status code equal to that given +func (o *CreateTaskOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the create task o k response +func (o *CreateTaskOK) Code() int { + return 200 +} + +func (o *CreateTaskOK) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/tasks][%d] createTaskOK %s", 200, payload) +} + +func (o *CreateTaskOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/tasks][%d] createTaskOK %s", 200, payload) +} + +func (o *CreateTaskOK) GetPayload() *run_model.V2beta1PipelineTaskDetail { + return o.Payload +} + +func (o *CreateTaskOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + o.Payload = new(run_model.V2beta1PipelineTaskDetail) + + // response payload + if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} + +// NewCreateTaskDefault creates a CreateTaskDefault with default headers values +func NewCreateTaskDefault(code int) *CreateTaskDefault { + return &CreateTaskDefault{ + _statusCode: code, + } +} + +/* +CreateTaskDefault describes a response with status code -1, with default header values. + +An unexpected error response. 
+*/ +type CreateTaskDefault struct { + _statusCode int + + Payload *run_model.GooglerpcStatus +} + +// IsSuccess returns true when this create task default response has a 2xx status code +func (o *CreateTaskDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this create task default response has a 3xx status code +func (o *CreateTaskDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this create task default response has a 4xx status code +func (o *CreateTaskDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this create task default response has a 5xx status code +func (o *CreateTaskDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this create task default response a status code equal to that given +func (o *CreateTaskDefault) IsCode(code int) bool { + return o._statusCode == code +} + +// Code gets the status code for the create task default response +func (o *CreateTaskDefault) Code() int { + return o._statusCode +} + +func (o *CreateTaskDefault) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/tasks][%d] create_task default %s", o._statusCode, payload) +} + +func (o *CreateTaskDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[POST /apis/v2beta1/tasks][%d] create_task default %s", o._statusCode, payload) +} + +func (o *CreateTaskDefault) GetPayload() *run_model.GooglerpcStatus { + return o.Payload +} + +func (o *CreateTaskDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + o.Payload = new(run_model.GooglerpcStatus) + + // response payload + if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} diff --git a/backend/api/v2beta1/go_http_client/run_client/run_service/get_task_parameters.go b/backend/api/v2beta1/go_http_client/run_client/run_service/get_task_parameters.go new file mode 100644 index 00000000000..0a13b0e39cf --- /dev/null +++ b/backend/api/v2beta1/go_http_client/run_client/run_service/get_task_parameters.go @@ -0,0 +1,148 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package run_service + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "net/http" + "time" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" +) + +// NewGetTaskParams creates a new GetTaskParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. +func NewGetTaskParams() *GetTaskParams { + return &GetTaskParams{ + timeout: cr.DefaultTimeout, + } +} + +// NewGetTaskParamsWithTimeout creates a new GetTaskParams object +// with the ability to set a timeout on a request. +func NewGetTaskParamsWithTimeout(timeout time.Duration) *GetTaskParams { + return &GetTaskParams{ + timeout: timeout, + } +} + +// NewGetTaskParamsWithContext creates a new GetTaskParams object +// with the ability to set a context for a request. 
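With both the parameter and response types for CreateTask now in place, a call composes through the fluent With* setters. A hedged sketch, reusing the assumed run_service.New constructor from the note above; the empty V2beta1PipelineTaskDetail body and host are placeholders, since the model's fields are defined elsewhere in this PR:

package main

import (
	"log"
	"time"

	httptransport "github.com/go-openapi/runtime/client"
	"github.com/go-openapi/strfmt"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_client/run_service"
	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model"
)

func main() {
	transport := httptransport.New("localhost:8888", "/", []string{"http"})
	svc := run_service.New(transport, strfmt.Default) // assumed generated constructor

	// The task body is left empty here; a real caller would populate the
	// V2beta1PipelineTaskDetail fields (not shown in this file).
	task := &run_model.V2beta1PipelineTaskDetail{}

	params := run_service.NewCreateTaskParams().
		WithTask(task).
		WithTimeout(30 * time.Second) // overrides cr.DefaultTimeout

	created, err := svc.CreateTask(params, nil)
	if err != nil {
		log.Fatalf("create task failed: %v", err)
	}
	log.Printf("created task: %v", created.GetPayload())
}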
+func NewGetTaskParamsWithContext(ctx context.Context) *GetTaskParams { + return &GetTaskParams{ + Context: ctx, + } +} + +// NewGetTaskParamsWithHTTPClient creates a new GetTaskParams object +// with the ability to set a custom HTTPClient for a request. +func NewGetTaskParamsWithHTTPClient(client *http.Client) *GetTaskParams { + return &GetTaskParams{ + HTTPClient: client, + } +} + +/* +GetTaskParams contains all the parameters to send to the API endpoint + + for the get task operation. + + Typically these are written to a http.Request. +*/ +type GetTaskParams struct { + + // TaskID. + TaskID string + + timeout time.Duration + Context context.Context + HTTPClient *http.Client +} + +// WithDefaults hydrates default values in the get task params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *GetTaskParams) WithDefaults() *GetTaskParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the get task params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *GetTaskParams) SetDefaults() { + // no default values defined for this parameter +} + +// WithTimeout adds the timeout to the get task params +func (o *GetTaskParams) WithTimeout(timeout time.Duration) *GetTaskParams { + o.SetTimeout(timeout) + return o +} + +// SetTimeout adds the timeout to the get task params +func (o *GetTaskParams) SetTimeout(timeout time.Duration) { + o.timeout = timeout +} + +// WithContext adds the context to the get task params +func (o *GetTaskParams) WithContext(ctx context.Context) *GetTaskParams { + o.SetContext(ctx) + return o +} + +// SetContext adds the context to the get task params +func (o *GetTaskParams) SetContext(ctx context.Context) { + o.Context = ctx +} + +// WithHTTPClient adds the HTTPClient to the get task params +func (o *GetTaskParams) WithHTTPClient(client *http.Client) *GetTaskParams { + o.SetHTTPClient(client) + return o +} + +// SetHTTPClient adds the HTTPClient to the get task params +func (o *GetTaskParams) SetHTTPClient(client *http.Client) { + o.HTTPClient = client +} + +// WithTaskID adds the taskID to the get task params +func (o *GetTaskParams) WithTaskID(taskID string) *GetTaskParams { + o.SetTaskID(taskID) + return o +} + +// SetTaskID adds the taskId to the get task params +func (o *GetTaskParams) SetTaskID(taskID string) { + o.TaskID = taskID +} + +// WriteToRequest writes these params to a swagger request +func (o *GetTaskParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { + + if err := r.SetTimeout(o.timeout); err != nil { + return err + } + var res []error + + // path param task_id + if err := r.SetPathParam("task_id", o.TaskID); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} diff --git a/backend/api/v2beta1/go_http_client/run_client/run_service/get_task_responses.go b/backend/api/v2beta1/go_http_client/run_client/run_service/get_task_responses.go new file mode 100644 index 00000000000..73361f1ce74 --- /dev/null +++ b/backend/api/v2beta1/go_http_client/run_client/run_service/get_task_responses.go @@ -0,0 +1,187 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package run_service + +// This file was generated by the swagger tool. 
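The context-aware constructor above is the one to reach for in server-side callers, since the context flows through to the underlying http.Request and cancels the call with its parent. A small sketch against the ClientService interface added in this diff (package name is illustrative):

package taskutil // illustrative package name

import (
	"context"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_client/run_service"
	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model"
)

// fetchTask propagates ctx so the HTTP request is cancelled with the caller.
func fetchTask(ctx context.Context, svc run_service.ClientService, id string) (*run_model.V2beta1PipelineTaskDetail, error) {
	params := run_service.NewGetTaskParamsWithContext(ctx).WithTaskID(id)
	ok, err := svc.GetTask(params, nil) // nil auth writer: anonymous call
	if err != nil {
		return nil, err
	}
	return ok.GetPayload(), nil
}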
+// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" + + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model" +) + +// GetTaskReader is a Reader for the GetTask structure. +type GetTaskReader struct { + formats strfmt.Registry +} + +// ReadResponse reads a server response into the received o. +func (o *GetTaskReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + switch response.Code() { + case 200: + result := NewGetTaskOK() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return result, nil + default: + result := NewGetTaskDefault(response.Code()) + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + if response.Code()/100 == 2 { + return result, nil + } + return nil, result + } +} + +// NewGetTaskOK creates a GetTaskOK with default headers values +func NewGetTaskOK() *GetTaskOK { + return &GetTaskOK{} +} + +/* +GetTaskOK describes a response with status code 200, with default header values. + +A successful response. +*/ +type GetTaskOK struct { + Payload *run_model.V2beta1PipelineTaskDetail +} + +// IsSuccess returns true when this get task o k response has a 2xx status code +func (o *GetTaskOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this get task o k response has a 3xx status code +func (o *GetTaskOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this get task o k response has a 4xx status code +func (o *GetTaskOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this get task o k response has a 5xx status code +func (o *GetTaskOK) IsServerError() bool { + return false +} + +// IsCode returns true when this get task o k response a status code equal to that given +func (o *GetTaskOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the get task o k response +func (o *GetTaskOK) Code() int { + return 200 +} + +func (o *GetTaskOK) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/tasks/{task_id}][%d] getTaskOK %s", 200, payload) +} + +func (o *GetTaskOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/tasks/{task_id}][%d] getTaskOK %s", 200, payload) +} + +func (o *GetTaskOK) GetPayload() *run_model.V2beta1PipelineTaskDetail { + return o.Payload +} + +func (o *GetTaskOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + o.Payload = new(run_model.V2beta1PipelineTaskDetail) + + // response payload + if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} + +// NewGetTaskDefault creates a GetTaskDefault with default headers values +func NewGetTaskDefault(code int) *GetTaskDefault { + return &GetTaskDefault{ + _statusCode: code, + } +} + +/* +GetTaskDefault describes a response with status code -1, with default header values. + +An unexpected error response. 
+*/ +type GetTaskDefault struct { + _statusCode int + + Payload *run_model.GooglerpcStatus +} + +// IsSuccess returns true when this get task default response has a 2xx status code +func (o *GetTaskDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this get task default response has a 3xx status code +func (o *GetTaskDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this get task default response has a 4xx status code +func (o *GetTaskDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this get task default response has a 5xx status code +func (o *GetTaskDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this get task default response a status code equal to that given +func (o *GetTaskDefault) IsCode(code int) bool { + return o._statusCode == code +} + +// Code gets the status code for the get task default response +func (o *GetTaskDefault) Code() int { + return o._statusCode +} + +func (o *GetTaskDefault) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/tasks/{task_id}][%d] get_task default %s", o._statusCode, payload) +} + +func (o *GetTaskDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/tasks/{task_id}][%d] get_task default %s", o._statusCode, payload) +} + +func (o *GetTaskDefault) GetPayload() *run_model.GooglerpcStatus { + return o.Payload +} + +func (o *GetTaskDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + o.Payload = new(run_model.GooglerpcStatus) + + // response payload + if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} diff --git a/backend/api/v2beta1/go_http_client/run_client/run_service/list_tasks_parameters.go b/backend/api/v2beta1/go_http_client/run_client/run_service/list_tasks_parameters.go new file mode 100644 index 00000000000..4b2e088a0e3 --- /dev/null +++ b/backend/api/v2beta1/go_http_client/run_client/run_service/list_tasks_parameters.go @@ -0,0 +1,359 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package run_service + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "net/http" + "time" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" +) + +// NewListTasksParams creates a new ListTasksParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. +func NewListTasksParams() *ListTasksParams { + return &ListTasksParams{ + timeout: cr.DefaultTimeout, + } +} + +// NewListTasksParamsWithTimeout creates a new ListTasksParams object +// with the ability to set a timeout on a request. +func NewListTasksParamsWithTimeout(timeout time.Duration) *ListTasksParams { + return &ListTasksParams{ + timeout: timeout, + } +} + +// NewListTasksParamsWithContext creates a new ListTasksParams object +// with the ability to set a context for a request. 
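ListTasks is the richest of the new operations; its parameter struct just below carries the filter, namespace, parent_id, and run_id filters plus page_size/page_token pagination. A hedged pagination sketch follows; note that the Tasks and NextPageToken fields on V2beta1ListTasksResponse are assumptions inferred from the page_token parameter, since that model is not part of this file:

package main

import (
	"log"

	httptransport "github.com/go-openapi/runtime/client"
	"github.com/go-openapi/strfmt"
	"github.com/go-openapi/swag"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_client/run_service"
)

func main() {
	transport := httptransport.New("localhost:8888", "/", []string{"http"})
	svc := run_service.New(transport, strfmt.Default) // assumed generated constructor

	// Page through every task of one run, 50 at a time.
	params := run_service.NewListTasksParams().
		WithRunID(swag.String("my-run-id")). // placeholder run ID
		WithPageSize(swag.Int32(50))
	for {
		ok, err := svc.ListTasks(params, nil)
		if err != nil {
			log.Fatalf("list tasks failed: %v", err)
		}
		resp := ok.GetPayload()
		// Tasks and NextPageToken are assumed fields on the response model.
		log.Printf("got %d tasks", len(resp.Tasks))
		if resp.NextPageToken == "" {
			break
		}
		params.SetPageToken(swag.String(resp.NextPageToken))
	}
}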
+func NewListTasksParamsWithContext(ctx context.Context) *ListTasksParams { + return &ListTasksParams{ + Context: ctx, + } +} + +// NewListTasksParamsWithHTTPClient creates a new ListTasksParams object +// with the ability to set a custom HTTPClient for a request. +func NewListTasksParamsWithHTTPClient(client *http.Client) *ListTasksParams { + return &ListTasksParams{ + HTTPClient: client, + } +} + +/* +ListTasksParams contains all the parameters to send to the API endpoint + + for the list tasks operation. + + Typically these are written to a http.Request. +*/ +type ListTasksParams struct { + + // Filter. + Filter *string + + /* Namespace. + + List all tasks in this namespace. + The primary use case for this filter is to detect cache hits. + */ + Namespace *string + + // OrderBy. + OrderBy *string + + // PageSize. + // + // Format: int32 + PageSize *int32 + + // PageToken. + PageToken *string + + /* ParentID. + + List all tasks with this parent task. + */ + ParentID *string + + /* RunID. + + List all tasks for this run. + */ + RunID *string + + timeout time.Duration + Context context.Context + HTTPClient *http.Client +} + +// WithDefaults hydrates default values in the list tasks params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *ListTasksParams) WithDefaults() *ListTasksParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the list tasks params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *ListTasksParams) SetDefaults() { + // no default values defined for this parameter +} + +// WithTimeout adds the timeout to the list tasks params +func (o *ListTasksParams) WithTimeout(timeout time.Duration) *ListTasksParams { + o.SetTimeout(timeout) + return o +} + +// SetTimeout adds the timeout to the list tasks params +func (o *ListTasksParams) SetTimeout(timeout time.Duration) { + o.timeout = timeout +} + +// WithContext adds the context to the list tasks params +func (o *ListTasksParams) WithContext(ctx context.Context) *ListTasksParams { + o.SetContext(ctx) + return o +} + +// SetContext adds the context to the list tasks params +func (o *ListTasksParams) SetContext(ctx context.Context) { + o.Context = ctx +} + +// WithHTTPClient adds the HTTPClient to the list tasks params +func (o *ListTasksParams) WithHTTPClient(client *http.Client) *ListTasksParams { + o.SetHTTPClient(client) + return o +} + +// SetHTTPClient adds the HTTPClient to the list tasks params +func (o *ListTasksParams) SetHTTPClient(client *http.Client) { + o.HTTPClient = client +} + +// WithFilter adds the filter to the list tasks params +func (o *ListTasksParams) WithFilter(filter *string) *ListTasksParams { + o.SetFilter(filter) + return o +} + +// SetFilter adds the filter to the list tasks params +func (o *ListTasksParams) SetFilter(filter *string) { + o.Filter = filter +} + +// WithNamespace adds the namespace to the list tasks params +func (o *ListTasksParams) WithNamespace(namespace *string) *ListTasksParams { + o.SetNamespace(namespace) + return o +} + +// SetNamespace adds the namespace to the list tasks params +func (o *ListTasksParams) SetNamespace(namespace *string) { + o.Namespace = namespace +} + +// WithOrderBy adds the orderBy to the list tasks params +func (o *ListTasksParams) WithOrderBy(orderBy *string) *ListTasksParams { + o.SetOrderBy(orderBy) + return o +} + +// SetOrderBy adds the orderBy to the list tasks params +func (o *ListTasksParams) SetOrderBy(orderBy *string) { + 
o.OrderBy = orderBy +} + +// WithPageSize adds the pageSize to the list tasks params +func (o *ListTasksParams) WithPageSize(pageSize *int32) *ListTasksParams { + o.SetPageSize(pageSize) + return o +} + +// SetPageSize adds the pageSize to the list tasks params +func (o *ListTasksParams) SetPageSize(pageSize *int32) { + o.PageSize = pageSize +} + +// WithPageToken adds the pageToken to the list tasks params +func (o *ListTasksParams) WithPageToken(pageToken *string) *ListTasksParams { + o.SetPageToken(pageToken) + return o +} + +// SetPageToken adds the pageToken to the list tasks params +func (o *ListTasksParams) SetPageToken(pageToken *string) { + o.PageToken = pageToken +} + +// WithParentID adds the parentID to the list tasks params +func (o *ListTasksParams) WithParentID(parentID *string) *ListTasksParams { + o.SetParentID(parentID) + return o +} + +// SetParentID adds the parentId to the list tasks params +func (o *ListTasksParams) SetParentID(parentID *string) { + o.ParentID = parentID +} + +// WithRunID adds the runID to the list tasks params +func (o *ListTasksParams) WithRunID(runID *string) *ListTasksParams { + o.SetRunID(runID) + return o +} + +// SetRunID adds the runId to the list tasks params +func (o *ListTasksParams) SetRunID(runID *string) { + o.RunID = runID +} + +// WriteToRequest writes these params to a swagger request +func (o *ListTasksParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { + + if err := r.SetTimeout(o.timeout); err != nil { + return err + } + var res []error + + if o.Filter != nil { + + // query param filter + var qrFilter string + + if o.Filter != nil { + qrFilter = *o.Filter + } + qFilter := qrFilter + if qFilter != "" { + + if err := r.SetQueryParam("filter", qFilter); err != nil { + return err + } + } + } + + if o.Namespace != nil { + + // query param namespace + var qrNamespace string + + if o.Namespace != nil { + qrNamespace = *o.Namespace + } + qNamespace := qrNamespace + if qNamespace != "" { + + if err := r.SetQueryParam("namespace", qNamespace); err != nil { + return err + } + } + } + + if o.OrderBy != nil { + + // query param order_by + var qrOrderBy string + + if o.OrderBy != nil { + qrOrderBy = *o.OrderBy + } + qOrderBy := qrOrderBy + if qOrderBy != "" { + + if err := r.SetQueryParam("order_by", qOrderBy); err != nil { + return err + } + } + } + + if o.PageSize != nil { + + // query param page_size + var qrPageSize int32 + + if o.PageSize != nil { + qrPageSize = *o.PageSize + } + qPageSize := swag.FormatInt32(qrPageSize) + if qPageSize != "" { + + if err := r.SetQueryParam("page_size", qPageSize); err != nil { + return err + } + } + } + + if o.PageToken != nil { + + // query param page_token + var qrPageToken string + + if o.PageToken != nil { + qrPageToken = *o.PageToken + } + qPageToken := qrPageToken + if qPageToken != "" { + + if err := r.SetQueryParam("page_token", qPageToken); err != nil { + return err + } + } + } + + if o.ParentID != nil { + + // query param parent_id + var qrParentID string + + if o.ParentID != nil { + qrParentID = *o.ParentID + } + qParentID := qrParentID + if qParentID != "" { + + if err := r.SetQueryParam("parent_id", qParentID); err != nil { + return err + } + } + } + + if o.RunID != nil { + + // query param run_id + var qrRunID string + + if o.RunID != nil { + qrRunID = *o.RunID + } + qRunID := qrRunID + if qRunID != "" { + + if err := r.SetQueryParam("run_id", qRunID); err != nil { + return err + } + } + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} diff --git a/backend/api/v2beta1/go_http_client/run_client/run_service/list_tasks_responses.go b/backend/api/v2beta1/go_http_client/run_client/run_service/list_tasks_responses.go new file mode 100644 index 00000000000..c3a125cdae0 --- /dev/null +++ b/backend/api/v2beta1/go_http_client/run_client/run_service/list_tasks_responses.go @@ -0,0 +1,187 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package run_service + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" + + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model" +) + +// ListTasksReader is a Reader for the ListTasks structure. +type ListTasksReader struct { + formats strfmt.Registry +} + +// ReadResponse reads a server response into the received o. +func (o *ListTasksReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + switch response.Code() { + case 200: + result := NewListTasksOK() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return result, nil + default: + result := NewListTasksDefault(response.Code()) + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + if response.Code()/100 == 2 { + return result, nil + } + return nil, result + } +} + +// NewListTasksOK creates a ListTasksOK with default headers values +func NewListTasksOK() *ListTasksOK { + return &ListTasksOK{} +} + +/* +ListTasksOK describes a response with status code 200, with default header values. + +A successful response. +*/ +type ListTasksOK struct { + Payload *run_model.V2beta1ListTasksResponse +} + +// IsSuccess returns true when this list tasks o k response has a 2xx status code +func (o *ListTasksOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this list tasks o k response has a 3xx status code +func (o *ListTasksOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this list tasks o k response has a 4xx status code +func (o *ListTasksOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this list tasks o k response has a 5xx status code +func (o *ListTasksOK) IsServerError() bool { + return false +} + +// IsCode returns true when this list tasks o k response a status code equal to that given +func (o *ListTasksOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the list tasks o k response +func (o *ListTasksOK) Code() int { + return 200 +} + +func (o *ListTasksOK) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/tasks][%d] listTasksOK %s", 200, payload) +} + +func (o *ListTasksOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/tasks][%d] listTasksOK %s", 200, payload) +} + +func (o *ListTasksOK) GetPayload() *run_model.V2beta1ListTasksResponse { + return o.Payload +} + +func (o *ListTasksOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + o.Payload = new(run_model.V2beta1ListTasksResponse) + + // response payload + if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} + +// NewListTasksDefault creates a ListTasksDefault with 
default headers values +func NewListTasksDefault(code int) *ListTasksDefault { + return &ListTasksDefault{ + _statusCode: code, + } +} + +/* +ListTasksDefault describes a response with status code -1, with default header values. + +An unexpected error response. +*/ +type ListTasksDefault struct { + _statusCode int + + Payload *run_model.GooglerpcStatus +} + +// IsSuccess returns true when this list tasks default response has a 2xx status code +func (o *ListTasksDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this list tasks default response has a 3xx status code +func (o *ListTasksDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this list tasks default response has a 4xx status code +func (o *ListTasksDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this list tasks default response has a 5xx status code +func (o *ListTasksDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this list tasks default response a status code equal to that given +func (o *ListTasksDefault) IsCode(code int) bool { + return o._statusCode == code +} + +// Code gets the status code for the list tasks default response +func (o *ListTasksDefault) Code() int { + return o._statusCode +} + +func (o *ListTasksDefault) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/tasks][%d] list_tasks default %s", o._statusCode, payload) +} + +func (o *ListTasksDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[GET /apis/v2beta1/tasks][%d] list_tasks default %s", o._statusCode, payload) +} + +func (o *ListTasksDefault) GetPayload() *run_model.GooglerpcStatus { + return o.Payload +} + +func (o *ListTasksDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + o.Payload = new(run_model.GooglerpcStatus) + + // response payload + if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} diff --git a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_client.go b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_client.go index 3aa7c5bdddc..6235eb4adee 100644 --- a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_client.go +++ b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_client.go @@ -72,6 +72,16 @@ type ClientService interface { RunServiceUnarchiveRun(params *RunServiceUnarchiveRunParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*RunServiceUnarchiveRunOK, error) + BatchUpdateTasks(params *BatchUpdateTasksParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*BatchUpdateTasksOK, error) + + CreateTask(params *CreateTaskParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*CreateTaskOK, error) + + GetTask(params *GetTaskParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*GetTaskOK, error) + + ListTasks(params *ListTasksParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListTasksOK, error) + + UpdateTask(params *UpdateTaskParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*UpdateTaskOK, error) + SetTransport(transport runtime.ClientTransport) } @@ -417,6 +427,196 @@ func (a *Client) RunServiceUnarchiveRun(params 
*RunServiceUnarchiveRunParams, au return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) } +/* +BatchUpdateTasks updates multiple tasks in bulk +*/ +func (a *Client) BatchUpdateTasks(params *BatchUpdateTasksParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*BatchUpdateTasksOK, error) { + // TODO: Validate the params before sending + if params == nil { + params = NewBatchUpdateTasksParams() + } + op := &runtime.ClientOperation{ + ID: "batch_update_tasks", + Method: "POST", + PathPattern: "/apis/v2beta1/tasks:batchUpdate", + ProducesMediaTypes: []string{"application/json"}, + ConsumesMediaTypes: []string{"application/json"}, + Schemes: []string{"http", "https"}, + Params: params, + Reader: &BatchUpdateTasksReader{formats: a.formats}, + AuthInfo: authInfo, + Context: params.Context, + Client: params.HTTPClient, + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) + if err != nil { + return nil, err + } + success, ok := result.(*BatchUpdateTasksOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*BatchUpdateTasksDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) +} + +/* +CreateTask creates a new task +*/ +func (a *Client) CreateTask(params *CreateTaskParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*CreateTaskOK, error) { + // TODO: Validate the params before sending + if params == nil { + params = NewCreateTaskParams() + } + op := &runtime.ClientOperation{ + ID: "create_task", + Method: "POST", + PathPattern: "/apis/v2beta1/tasks", + ProducesMediaTypes: []string{"application/json"}, + ConsumesMediaTypes: []string{"application/json"}, + Schemes: []string{"http", "https"}, + Params: params, + Reader: &CreateTaskReader{formats: a.formats}, + AuthInfo: authInfo, + Context: params.Context, + Client: params.HTTPClient, + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) + if err != nil { + return nil, err + } + success, ok := result.(*CreateTaskOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*CreateTaskDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) +} + +/* +GetTask gets a specific task by ID +*/ +func (a *Client) GetTask(params *GetTaskParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*GetTaskOK, error) { + // TODO: Validate the params before sending + if params == nil { + params = NewGetTaskParams() + } + op := &runtime.ClientOperation{ + ID: "get_task", + Method: "GET", + PathPattern: "/apis/v2beta1/tasks/{task_id}", + ProducesMediaTypes: []string{"application/json"}, + ConsumesMediaTypes: []string{"application/json"}, + Schemes: []string{"http", "https"}, + Params: params, + Reader: &GetTaskReader{formats: a.formats}, + AuthInfo: authInfo, + Context: params.Context, + Client: params.HTTPClient, + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) + if err != nil { + return nil, err + } + success, ok := result.(*GetTaskOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*GetTaskDefault) + return nil, runtime.NewAPIError("unexpected success 
response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) +} + +/* +ListTasks lists tasks with optional filtering +*/ +func (a *Client) ListTasks(params *ListTasksParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ListTasksOK, error) { + // TODO: Validate the params before sending + if params == nil { + params = NewListTasksParams() + } + op := &runtime.ClientOperation{ + ID: "list_tasks", + Method: "GET", + PathPattern: "/apis/v2beta1/tasks", + ProducesMediaTypes: []string{"application/json"}, + ConsumesMediaTypes: []string{"application/json"}, + Schemes: []string{"http", "https"}, + Params: params, + Reader: &ListTasksReader{formats: a.formats}, + AuthInfo: authInfo, + Context: params.Context, + Client: params.HTTPClient, + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) + if err != nil { + return nil, err + } + success, ok := result.(*ListTasksOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*ListTasksDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) +} + +/* +UpdateTask updates an existing task +*/ +func (a *Client) UpdateTask(params *UpdateTaskParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*UpdateTaskOK, error) { + // TODO: Validate the params before sending + if params == nil { + params = NewUpdateTaskParams() + } + op := &runtime.ClientOperation{ + ID: "update_task", + Method: "PATCH", + PathPattern: "/apis/v2beta1/tasks/{task_id}", + ProducesMediaTypes: []string{"application/json"}, + ConsumesMediaTypes: []string{"application/json"}, + Schemes: []string{"http", "https"}, + Params: params, + Reader: &UpdateTaskReader{formats: a.formats}, + AuthInfo: authInfo, + Context: params.Context, + Client: params.HTTPClient, + } + for _, opt := range opts { + opt(op) + } + + result, err := a.transport.Submit(op) + if err != nil { + return nil, err + } + success, ok := result.(*UpdateTaskOK) + if ok { + return success, nil + } + // unexpected success response + unexpectedSuccess := result.(*UpdateTaskDefault) + return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code()) +} + // SetTransport changes the transport on the client func (a *Client) SetTransport(transport runtime.ClientTransport) { a.transport = transport diff --git a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_get_run_parameters.go b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_get_run_parameters.go index 0a8a491eddf..a952a6ffd7e 100644 --- a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_get_run_parameters.go +++ b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_get_run_parameters.go @@ -73,6 +73,20 @@ type RunServiceGetRunParams struct { */ RunID string + /* View. + + Optional view mode. This field can be used to adjust + how detailed the Run object that is returned will be. + + - DEFAULT: By default `tasks` field is omitted. + This provides a faster and leaner run object. + - FULL: This view mode displays all the tasks for this run + with all its fields populated. 
+ + Default: "DEFAULT" + */ + View *string + timeout time.Duration Context context.Context HTTPClient *http.Client @@ -90,7 +104,18 @@ func (o *RunServiceGetRunParams) WithDefaults() *RunServiceGetRunParams { // // All values with no default are reset to their zero value. func (o *RunServiceGetRunParams) SetDefaults() { - // no default values defined for this parameter + var ( + viewDefault = string("DEFAULT") + ) + + val := RunServiceGetRunParams{ + View: &viewDefault, + } + + val.timeout = o.timeout + val.Context = o.Context + val.HTTPClient = o.HTTPClient + *o = val } // WithTimeout adds the timeout to the run service get run params @@ -148,6 +173,17 @@ func (o *RunServiceGetRunParams) SetRunID(runID string) { o.RunID = runID } +// WithView adds the view to the run service get run params +func (o *RunServiceGetRunParams) WithView(view *string) *RunServiceGetRunParams { + o.SetView(view) + return o +} + +// SetView adds the view to the run service get run params +func (o *RunServiceGetRunParams) SetView(view *string) { + o.View = view +} + // WriteToRequest writes these params to a swagger request func (o *RunServiceGetRunParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { @@ -178,6 +214,23 @@ func (o *RunServiceGetRunParams) WriteToRequest(r runtime.ClientRequest, reg str return err } + if o.View != nil { + + // query param view + var qrView string + + if o.View != nil { + qrView = *o.View + } + qView := qrView + if qView != "" { + + if err := r.SetQueryParam("view", qView); err != nil { + return err + } + } + } + if len(res) > 0 { return errors.CompositeValidationError(res...) } diff --git a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_list_runs_parameters.go b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_list_runs_parameters.go index dc64db86ee5..e5ec0decea2 100644 --- a/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_list_runs_parameters.go +++ b/backend/api/v2beta1/go_http_client/run_client/run_service/run_service_list_runs_parameters.go @@ -106,6 +106,20 @@ type RunServiceListRunsParams struct { */ SortBy *string + /* View. + + Optional view mode. This field can be used to adjust + how detailed the Run object that is returned will be. + + - DEFAULT: By default `tasks` field is omitted. + This provides a faster and leaner run object. + - FULL: This view mode displays all the tasks for this run + with all its fields populated. + + Default: "DEFAULT" + */ + View *string + timeout time.Duration Context context.Context HTTPClient *http.Client @@ -123,7 +137,18 @@ func (o *RunServiceListRunsParams) WithDefaults() *RunServiceListRunsParams { // // All values with no default are reset to their zero value. 
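Note the behavior documented above: the generated constructors do not hydrate the "DEFAULT" view; SetDefaults does, and it resets every non-defaulted field to its zero value in the process (so call it before, not after, setting RunID and friends). Either way the server treats an omitted view as DEFAULT, so tasks only come back when a caller opts in. A short sketch, assuming the RunServiceGetRun client method that this package's naming pattern implies and a placeholder run ID:

package main

import (
	"log"

	httptransport "github.com/go-openapi/runtime/client"
	"github.com/go-openapi/strfmt"
	"github.com/go-openapi/swag"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_client/run_service"
)

func main() {
	transport := httptransport.New("localhost:8888", "/", []string{"http"})
	svc := run_service.New(transport, strfmt.Default) // assumed generated constructor

	params := run_service.NewRunServiceGetRunParams()
	params.SetRunID("my-run-id")        // placeholder run ID
	params.SetView(swag.String("FULL")) // omit for the lean, task-free run object

	ok, err := svc.RunServiceGetRun(params, nil) // method name assumed from the package's pattern
	if err != nil {
		log.Fatalf("get run failed: %v", err)
	}
	log.Printf("run with tasks: %v", ok.GetPayload())
}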
func (o *RunServiceListRunsParams) SetDefaults() { - // no default values defined for this parameter + var ( + viewDefault = string("DEFAULT") + ) + + val := RunServiceListRunsParams{ + View: &viewDefault, + } + + val.timeout = o.timeout + val.Context = o.Context + val.HTTPClient = o.HTTPClient + *o = val } // WithTimeout adds the timeout to the run service list runs params @@ -225,6 +250,17 @@ func (o *RunServiceListRunsParams) SetSortBy(sortBy *string) { o.SortBy = sortBy } +// WithView adds the view to the run service list runs params +func (o *RunServiceListRunsParams) WithView(view *string) *RunServiceListRunsParams { + o.SetView(view) + return o +} + +// SetView adds the view to the run service list runs params +func (o *RunServiceListRunsParams) SetView(view *string) { + o.View = view +} + // WriteToRequest writes these params to a swagger request func (o *RunServiceListRunsParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { @@ -335,6 +371,23 @@ func (o *RunServiceListRunsParams) WriteToRequest(r runtime.ClientRequest, reg s } } + if o.View != nil { + + // query param view + var qrView string + + if o.View != nil { + qrView = *o.View + } + qView := qrView + if qView != "" { + + if err := r.SetQueryParam("view", qView); err != nil { + return err + } + } + } + if len(res) > 0 { return errors.CompositeValidationError(res...) } diff --git a/backend/api/v2beta1/go_http_client/run_client/run_service/update_task_parameters.go b/backend/api/v2beta1/go_http_client/run_client/run_service/update_task_parameters.go new file mode 100644 index 00000000000..8dc2643c1a3 --- /dev/null +++ b/backend/api/v2beta1/go_http_client/run_client/run_service/update_task_parameters.go @@ -0,0 +1,169 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package run_service + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "net/http" + "time" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/strfmt" + + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model" +) + +// NewUpdateTaskParams creates a new UpdateTaskParams object, +// with the default timeout for this client. +// +// Default values are not hydrated, since defaults are normally applied by the API server side. +// +// To enforce default values in parameter, use SetDefaults or WithDefaults. +func NewUpdateTaskParams() *UpdateTaskParams { + return &UpdateTaskParams{ + timeout: cr.DefaultTimeout, + } +} + +// NewUpdateTaskParamsWithTimeout creates a new UpdateTaskParams object +// with the ability to set a timeout on a request. +func NewUpdateTaskParamsWithTimeout(timeout time.Duration) *UpdateTaskParams { + return &UpdateTaskParams{ + timeout: timeout, + } +} + +// NewUpdateTaskParamsWithContext creates a new UpdateTaskParams object +// with the ability to set a context for a request. +func NewUpdateTaskParamsWithContext(ctx context.Context) *UpdateTaskParams { + return &UpdateTaskParams{ + Context: ctx, + } +} + +// NewUpdateTaskParamsWithHTTPClient creates a new UpdateTaskParams object +// with the ability to set a custom HTTPClient for a request. +func NewUpdateTaskParamsWithHTTPClient(client *http.Client) *UpdateTaskParams { + return &UpdateTaskParams{ + HTTPClient: client, + } +} + +/* +UpdateTaskParams contains all the parameters to send to the API endpoint + + for the update task operation. 
+ + Typically these are written to a http.Request. +*/ +type UpdateTaskParams struct { + + // Task. + Task *run_model.V2beta1PipelineTaskDetail + + // TaskID. + TaskID string + + timeout time.Duration + Context context.Context + HTTPClient *http.Client +} + +// WithDefaults hydrates default values in the update task params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *UpdateTaskParams) WithDefaults() *UpdateTaskParams { + o.SetDefaults() + return o +} + +// SetDefaults hydrates default values in the update task params (not the query body). +// +// All values with no default are reset to their zero value. +func (o *UpdateTaskParams) SetDefaults() { + // no default values defined for this parameter +} + +// WithTimeout adds the timeout to the update task params +func (o *UpdateTaskParams) WithTimeout(timeout time.Duration) *UpdateTaskParams { + o.SetTimeout(timeout) + return o +} + +// SetTimeout adds the timeout to the update task params +func (o *UpdateTaskParams) SetTimeout(timeout time.Duration) { + o.timeout = timeout +} + +// WithContext adds the context to the update task params +func (o *UpdateTaskParams) WithContext(ctx context.Context) *UpdateTaskParams { + o.SetContext(ctx) + return o +} + +// SetContext adds the context to the update task params +func (o *UpdateTaskParams) SetContext(ctx context.Context) { + o.Context = ctx +} + +// WithHTTPClient adds the HTTPClient to the update task params +func (o *UpdateTaskParams) WithHTTPClient(client *http.Client) *UpdateTaskParams { + o.SetHTTPClient(client) + return o +} + +// SetHTTPClient adds the HTTPClient to the update task params +func (o *UpdateTaskParams) SetHTTPClient(client *http.Client) { + o.HTTPClient = client +} + +// WithTask adds the task to the update task params +func (o *UpdateTaskParams) WithTask(task *run_model.V2beta1PipelineTaskDetail) *UpdateTaskParams { + o.SetTask(task) + return o +} + +// SetTask adds the task to the update task params +func (o *UpdateTaskParams) SetTask(task *run_model.V2beta1PipelineTaskDetail) { + o.Task = task +} + +// WithTaskID adds the taskID to the update task params +func (o *UpdateTaskParams) WithTaskID(taskID string) *UpdateTaskParams { + o.SetTaskID(taskID) + return o +} + +// SetTaskID adds the taskId to the update task params +func (o *UpdateTaskParams) SetTaskID(taskID string) { + o.TaskID = taskID +} + +// WriteToRequest writes these params to a swagger request +func (o *UpdateTaskParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { + + if err := r.SetTimeout(o.timeout); err != nil { + return err + } + var res []error + if o.Task != nil { + if err := r.SetBodyParam(o.Task); err != nil { + return err + } + } + + // path param task_id + if err := r.SetPathParam("task_id", o.TaskID); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} diff --git a/backend/api/v2beta1/go_http_client/run_client/run_service/update_task_responses.go b/backend/api/v2beta1/go_http_client/run_client/run_service/update_task_responses.go new file mode 100644 index 00000000000..6082fe685cc --- /dev/null +++ b/backend/api/v2beta1/go_http_client/run_client/run_service/update_task_responses.go @@ -0,0 +1,187 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package run_service + +// This file was generated by the swagger tool. 
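UpdateTask combines both parameter kinds shown so far: task_id travels in the URL path while the V2beta1PipelineTaskDetail body carries the new field values for the PATCH. A hedged sketch with placeholder IDs and an empty body (the model's fields are defined elsewhere in this PR):

package main

import (
	"log"

	httptransport "github.com/go-openapi/runtime/client"
	"github.com/go-openapi/strfmt"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_client/run_service"
	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model"
)

func main() {
	transport := httptransport.New("localhost:8888", "/", []string{"http"})
	svc := run_service.New(transport, strfmt.Default) // assumed generated constructor

	// task_id goes into the URL path; the body carries the updated fields
	// (left unset here as placeholders).
	params := run_service.NewUpdateTaskParams().
		WithTaskID("my-task-id").
		WithTask(&run_model.V2beta1PipelineTaskDetail{})

	updated, err := svc.UpdateTask(params, nil)
	if err != nil {
		log.Fatalf("update task failed: %v", err)
	}
	log.Printf("updated task: %v", updated.GetPayload())
}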
+// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" + + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model" +) + +// UpdateTaskReader is a Reader for the UpdateTask structure. +type UpdateTaskReader struct { + formats strfmt.Registry +} + +// ReadResponse reads a server response into the received o. +func (o *UpdateTaskReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + switch response.Code() { + case 200: + result := NewUpdateTaskOK() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return result, nil + default: + result := NewUpdateTaskDefault(response.Code()) + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + if response.Code()/100 == 2 { + return result, nil + } + return nil, result + } +} + +// NewUpdateTaskOK creates a UpdateTaskOK with default headers values +func NewUpdateTaskOK() *UpdateTaskOK { + return &UpdateTaskOK{} +} + +/* +UpdateTaskOK describes a response with status code 200, with default header values. + +A successful response. +*/ +type UpdateTaskOK struct { + Payload *run_model.V2beta1PipelineTaskDetail +} + +// IsSuccess returns true when this update task o k response has a 2xx status code +func (o *UpdateTaskOK) IsSuccess() bool { + return true +} + +// IsRedirect returns true when this update task o k response has a 3xx status code +func (o *UpdateTaskOK) IsRedirect() bool { + return false +} + +// IsClientError returns true when this update task o k response has a 4xx status code +func (o *UpdateTaskOK) IsClientError() bool { + return false +} + +// IsServerError returns true when this update task o k response has a 5xx status code +func (o *UpdateTaskOK) IsServerError() bool { + return false +} + +// IsCode returns true when this update task o k response a status code equal to that given +func (o *UpdateTaskOK) IsCode(code int) bool { + return code == 200 +} + +// Code gets the status code for the update task o k response +func (o *UpdateTaskOK) Code() int { + return 200 +} + +func (o *UpdateTaskOK) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[PATCH /apis/v2beta1/tasks/{task_id}][%d] updateTaskOK %s", 200, payload) +} + +func (o *UpdateTaskOK) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[PATCH /apis/v2beta1/tasks/{task_id}][%d] updateTaskOK %s", 200, payload) +} + +func (o *UpdateTaskOK) GetPayload() *run_model.V2beta1PipelineTaskDetail { + return o.Payload +} + +func (o *UpdateTaskOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + o.Payload = new(run_model.V2beta1PipelineTaskDetail) + + // response payload + if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} + +// NewUpdateTaskDefault creates a UpdateTaskDefault with default headers values +func NewUpdateTaskDefault(code int) *UpdateTaskDefault { + return &UpdateTaskDefault{ + _statusCode: code, + } +} + +/* +UpdateTaskDefault describes a response with status code -1, with default header values. + +An unexpected error response. 
+*/ +type UpdateTaskDefault struct { + _statusCode int + + Payload *run_model.GooglerpcStatus +} + +// IsSuccess returns true when this update task default response has a 2xx status code +func (o *UpdateTaskDefault) IsSuccess() bool { + return o._statusCode/100 == 2 +} + +// IsRedirect returns true when this update task default response has a 3xx status code +func (o *UpdateTaskDefault) IsRedirect() bool { + return o._statusCode/100 == 3 +} + +// IsClientError returns true when this update task default response has a 4xx status code +func (o *UpdateTaskDefault) IsClientError() bool { + return o._statusCode/100 == 4 +} + +// IsServerError returns true when this update task default response has a 5xx status code +func (o *UpdateTaskDefault) IsServerError() bool { + return o._statusCode/100 == 5 +} + +// IsCode returns true when this update task default response a status code equal to that given +func (o *UpdateTaskDefault) IsCode(code int) bool { + return o._statusCode == code +} + +// Code gets the status code for the update task default response +func (o *UpdateTaskDefault) Code() int { + return o._statusCode +} + +func (o *UpdateTaskDefault) Error() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[PATCH /apis/v2beta1/tasks/{task_id}][%d] update_task default %s", o._statusCode, payload) +} + +func (o *UpdateTaskDefault) String() string { + payload, _ := json.Marshal(o.Payload) + return fmt.Sprintf("[PATCH /apis/v2beta1/tasks/{task_id}][%d] update_task default %s", o._statusCode, payload) +} + +func (o *UpdateTaskDefault) GetPayload() *run_model.GooglerpcStatus { + return o.Payload +} + +func (o *UpdateTaskDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + o.Payload = new(run_model.GooglerpcStatus) + + // response payload + if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} diff --git a/backend/api/v2beta1/go_http_client/run_model/artifact_artifact_type.go b/backend/api/v2beta1/go_http_client/run_model/artifact_artifact_type.go new file mode 100644 index 00000000000..fce1524623f --- /dev/null +++ b/backend/api/v2beta1/go_http_client/run_model/artifact_artifact_type.go @@ -0,0 +1,100 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package run_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "encoding/json" + + "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/validate" +) + +// ArtifactArtifactType - TYPE_UNSPECIFIED: default; treated as "not set" +// reject if unset. +// +// swagger:model ArtifactArtifactType +type ArtifactArtifactType string + +func NewArtifactArtifactType(value ArtifactArtifactType) *ArtifactArtifactType { + return &value +} + +// Pointer returns a pointer to a freshly-allocated ArtifactArtifactType. 
+func (m ArtifactArtifactType) Pointer() *ArtifactArtifactType { + return &m +} + +const ( + + // ArtifactArtifactTypeTYPEUNSPECIFIED captures enum value "TYPE_UNSPECIFIED" + ArtifactArtifactTypeTYPEUNSPECIFIED ArtifactArtifactType = "TYPE_UNSPECIFIED" + + // ArtifactArtifactTypeArtifact captures enum value "Artifact" + ArtifactArtifactTypeArtifact ArtifactArtifactType = "Artifact" + + // ArtifactArtifactTypeModel captures enum value "Model" + ArtifactArtifactTypeModel ArtifactArtifactType = "Model" + + // ArtifactArtifactTypeDataset captures enum value "Dataset" + ArtifactArtifactTypeDataset ArtifactArtifactType = "Dataset" + + // ArtifactArtifactTypeHTML captures enum value "HTML" + ArtifactArtifactTypeHTML ArtifactArtifactType = "HTML" + + // ArtifactArtifactTypeMarkdown captures enum value "Markdown" + ArtifactArtifactTypeMarkdown ArtifactArtifactType = "Markdown" + + // ArtifactArtifactTypeMetric captures enum value "Metric" + ArtifactArtifactTypeMetric ArtifactArtifactType = "Metric" + + // ArtifactArtifactTypeClassificationMetric captures enum value "ClassificationMetric" + ArtifactArtifactTypeClassificationMetric ArtifactArtifactType = "ClassificationMetric" + + // ArtifactArtifactTypeSlicedClassificationMetric captures enum value "SlicedClassificationMetric" + ArtifactArtifactTypeSlicedClassificationMetric ArtifactArtifactType = "SlicedClassificationMetric" +) + +// for schema +var artifactArtifactTypeEnum []interface{} + +func init() { + var res []ArtifactArtifactType + if err := json.Unmarshal([]byte(`["TYPE_UNSPECIFIED","Artifact","Model","Dataset","HTML","Markdown","Metric","ClassificationMetric","SlicedClassificationMetric"]`), &res); err != nil { + panic(err) + } + for _, v := range res { + artifactArtifactTypeEnum = append(artifactArtifactTypeEnum, v) + } +} + +func (m ArtifactArtifactType) validateArtifactArtifactTypeEnum(path, location string, value ArtifactArtifactType) error { + if err := validate.EnumCase(path, location, value, artifactArtifactTypeEnum, true); err != nil { + return err + } + return nil +} + +// Validate validates this artifact artifact type +func (m ArtifactArtifactType) Validate(formats strfmt.Registry) error { + var res []error + + // value enum + if err := m.validateArtifactArtifactTypeEnum("", "body", m); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +// ContextValidate validates this artifact artifact type based on context it is used +func (m ArtifactArtifactType) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} diff --git a/backend/api/v2beta1/go_http_client/run_model/input_outputs_i_o_artifact.go b/backend/api/v2beta1/go_http_client/run_model/input_outputs_i_o_artifact.go new file mode 100644 index 00000000000..6108ac5938c --- /dev/null +++ b/backend/api/v2beta1/go_http_client/run_model/input_outputs_i_o_artifact.go @@ -0,0 +1,226 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package run_model + +// This file was generated by the swagger tool. 
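A short sketch of how the generated ArtifactArtifactType enum behaves under validation; the values and helpers used are exactly those defined in the file above:

package main

import (
	"fmt"

	"github.com/go-openapi/strfmt"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model"
)

func main() {
	// A generated enum value passes validation against the schema enum list.
	ok := run_model.ArtifactArtifactTypeMetric
	fmt.Println(ok.Validate(strfmt.Default)) // <nil>

	// Anything outside the generated list is rejected by validate.EnumCase.
	bad := run_model.ArtifactArtifactType("Tensor")
	fmt.Println(bad.Validate(strfmt.Default) != nil) // true
}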
+// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "strconv" + + "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" +) + +// InputOutputsIOArtifact Align structure with Executor Input +// +// swagger:model InputOutputsIOArtifact +type InputOutputsIOArtifact struct { + + // artifact key + ArtifactKey string `json:"artifact_key,omitempty"` + + // artifacts + Artifacts []*V2beta1Artifact `json:"artifacts"` + + // producer + Producer *V2beta1IOProducer `json:"producer,omitempty"` + + // type + Type *V2beta1IOType `json:"type,omitempty"` +} + +// Validate validates this input outputs i o artifact +func (m *InputOutputsIOArtifact) Validate(formats strfmt.Registry) error { + var res []error + + if err := m.validateArtifacts(formats); err != nil { + res = append(res, err) + } + + if err := m.validateProducer(formats); err != nil { + res = append(res, err) + } + + if err := m.validateType(formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *InputOutputsIOArtifact) validateArtifacts(formats strfmt.Registry) error { + if swag.IsZero(m.Artifacts) { // not required + return nil + } + + for i := 0; i < len(m.Artifacts); i++ { + if swag.IsZero(m.Artifacts[i]) { // not required + continue + } + + if m.Artifacts[i] != nil { + if err := m.Artifacts[i].Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("artifacts" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("artifacts" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +func (m *InputOutputsIOArtifact) validateProducer(formats strfmt.Registry) error { + if swag.IsZero(m.Producer) { // not required + return nil + } + + if m.Producer != nil { + if err := m.Producer.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("producer") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("producer") + } + return err + } + } + + return nil +} + +func (m *InputOutputsIOArtifact) validateType(formats strfmt.Registry) error { + if swag.IsZero(m.Type) { // not required + return nil + } + + if m.Type != nil { + if err := m.Type.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("type") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("type") + } + return err + } + } + + return nil +} + +// ContextValidate validate this input outputs i o artifact based on the context it is used +func (m *InputOutputsIOArtifact) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateArtifacts(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateProducer(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateType(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} + +func (m *InputOutputsIOArtifact) contextValidateArtifacts(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.Artifacts); i++ { + + if m.Artifacts[i] != nil { + + if swag.IsZero(m.Artifacts[i]) { // not required + return nil + } + + if err := m.Artifacts[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("artifacts" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("artifacts" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +func (m *InputOutputsIOArtifact) contextValidateProducer(ctx context.Context, formats strfmt.Registry) error { + + if m.Producer != nil { + + if swag.IsZero(m.Producer) { // not required + return nil + } + + if err := m.Producer.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("producer") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("producer") + } + return err + } + } + + return nil +} + +func (m *InputOutputsIOArtifact) contextValidateType(ctx context.Context, formats strfmt.Registry) error { + + if m.Type != nil { + + if swag.IsZero(m.Type) { // not required + return nil + } + + if err := m.Type.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("type") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("type") + } + return err + } + } + + return nil +} + +// MarshalBinary interface implementation +func (m *InputOutputsIOArtifact) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *InputOutputsIOArtifact) UnmarshalBinary(b []byte) error { + var res InputOutputsIOArtifact + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/v2beta1/go_http_client/run_model/input_outputs_i_o_parameter.go b/backend/api/v2beta1/go_http_client/run_model/input_outputs_i_o_parameter.go new file mode 100644 index 00000000000..74524f1d899 --- /dev/null +++ b/backend/api/v2beta1/go_http_client/run_model/input_outputs_i_o_parameter.go @@ -0,0 +1,168 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package run_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + + "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" +) + +// InputOutputsIOParameter input outputs i o parameter +// +// swagger:model InputOutputsIOParameter +type InputOutputsIOParameter struct { + + // parameter key + ParameterKey string `json:"parameter_key,omitempty"` + + // This field is optional because in the case of + // Input RuntimeValues, ComponentDefaultInputs, + // and Raw Iterator Input there are no producers. 
+ Producer *V2beta1IOProducer `json:"producer,omitempty"` + + // type + Type *V2beta1IOType `json:"type,omitempty"` + + // value + Value interface{} `json:"value,omitempty"` +} + +// Validate validates this input outputs i o parameter +func (m *InputOutputsIOParameter) Validate(formats strfmt.Registry) error { + var res []error + + if err := m.validateProducer(formats); err != nil { + res = append(res, err) + } + + if err := m.validateType(formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *InputOutputsIOParameter) validateProducer(formats strfmt.Registry) error { + if swag.IsZero(m.Producer) { // not required + return nil + } + + if m.Producer != nil { + if err := m.Producer.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("producer") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("producer") + } + return err + } + } + + return nil +} + +func (m *InputOutputsIOParameter) validateType(formats strfmt.Registry) error { + if swag.IsZero(m.Type) { // not required + return nil + } + + if m.Type != nil { + if err := m.Type.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("type") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("type") + } + return err + } + } + + return nil +} + +// ContextValidate validate this input outputs i o parameter based on the context it is used +func (m *InputOutputsIOParameter) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateProducer(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateType(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} + +func (m *InputOutputsIOParameter) contextValidateProducer(ctx context.Context, formats strfmt.Registry) error { + + if m.Producer != nil { + + if swag.IsZero(m.Producer) { // not required + return nil + } + + if err := m.Producer.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("producer") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("producer") + } + return err + } + } + + return nil +} + +func (m *InputOutputsIOParameter) contextValidateType(ctx context.Context, formats strfmt.Registry) error { + + if m.Type != nil { + + if swag.IsZero(m.Type) { // not required + return nil + } + + if err := m.Type.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("type") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("type") + } + return err + } + } + + return nil +} + +// MarshalBinary interface implementation +func (m *InputOutputsIOParameter) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *InputOutputsIOParameter) UnmarshalBinary(b []byte) error { + var res InputOutputsIOParameter + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/v2beta1/go_http_client/run_model/pipeline_task_detail_child_task.go b/backend/api/v2beta1/go_http_client/run_model/pipeline_task_detail_child_task.go index 52c8eda15d7..13c1be6bd24 100644 --- a/backend/api/v2beta1/go_http_client/run_model/pipeline_task_detail_child_task.go +++ b/backend/api/v2beta1/go_http_client/run_model/pipeline_task_detail_child_task.go @@ -18,9 +18,8 @@ import ( // swagger:model PipelineTaskDetailChildTask type PipelineTaskDetailChildTask struct { - // Name of the corresponding pod assigned by the orchestration engine. - // Also known as node_id. - PodName string `json:"pod_name,omitempty"` + // name + Name string `json:"name,omitempty"` // System-generated ID of a task. TaskID string `json:"task_id,omitempty"` diff --git a/backend/api/v2beta1/go_http_client/run_model/pipeline_task_detail_input_outputs.go b/backend/api/v2beta1/go_http_client/run_model/pipeline_task_detail_input_outputs.go new file mode 100644 index 00000000000..c9c5d556bd2 --- /dev/null +++ b/backend/api/v2beta1/go_http_client/run_model/pipeline_task_detail_input_outputs.go @@ -0,0 +1,185 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package run_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "strconv" + + "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" +) + +// PipelineTaskDetailInputOutputs pipeline task detail input outputs +// +// swagger:model PipelineTaskDetailInputOutputs +type PipelineTaskDetailInputOutputs struct { + + // Output Only. To create Artifacts for a task use + // ArtifactTasks to link artifacts to tasks. 
+ Artifacts []*InputOutputsIOArtifact `json:"artifacts"` + + // For Loops parameters are filled with resolved + // parameterIterator.items + Parameters []*InputOutputsIOParameter `json:"parameters"` +} + +// Validate validates this pipeline task detail input outputs +func (m *PipelineTaskDetailInputOutputs) Validate(formats strfmt.Registry) error { + var res []error + + if err := m.validateArtifacts(formats); err != nil { + res = append(res, err) + } + + if err := m.validateParameters(formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *PipelineTaskDetailInputOutputs) validateArtifacts(formats strfmt.Registry) error { + if swag.IsZero(m.Artifacts) { // not required + return nil + } + + for i := 0; i < len(m.Artifacts); i++ { + if swag.IsZero(m.Artifacts[i]) { // not required + continue + } + + if m.Artifacts[i] != nil { + if err := m.Artifacts[i].Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("artifacts" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("artifacts" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +func (m *PipelineTaskDetailInputOutputs) validateParameters(formats strfmt.Registry) error { + if swag.IsZero(m.Parameters) { // not required + return nil + } + + for i := 0; i < len(m.Parameters); i++ { + if swag.IsZero(m.Parameters[i]) { // not required + continue + } + + if m.Parameters[i] != nil { + if err := m.Parameters[i].Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("parameters" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("parameters" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this pipeline task detail input outputs based on the context it is used +func (m *PipelineTaskDetailInputOutputs) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateArtifacts(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateParameters(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *PipelineTaskDetailInputOutputs) contextValidateArtifacts(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.Artifacts); i++ { + + if m.Artifacts[i] != nil { + + if swag.IsZero(m.Artifacts[i]) { // not required + return nil + } + + if err := m.Artifacts[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("artifacts" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("artifacts" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +func (m *PipelineTaskDetailInputOutputs) contextValidateParameters(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.Parameters); i++ { + + if m.Parameters[i] != nil { + + if swag.IsZero(m.Parameters[i]) { // not required + return nil + } + + if err := m.Parameters[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("parameters" + "." 
+ strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("parameters" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// MarshalBinary interface implementation +func (m *PipelineTaskDetailInputOutputs) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *PipelineTaskDetailInputOutputs) UnmarshalBinary(b []byte) error { + var res PipelineTaskDetailInputOutputs + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/v2beta1/go_http_client/run_model/pipeline_task_detail_status_metadata.go b/backend/api/v2beta1/go_http_client/run_model/pipeline_task_detail_status_metadata.go new file mode 100644 index 00000000000..04ff0c6d601 --- /dev/null +++ b/backend/api/v2beta1/go_http_client/run_model/pipeline_task_detail_status_metadata.go @@ -0,0 +1,56 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package run_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" +) + +// PipelineTaskDetailStatusMetadata pipeline task detail status metadata +// +// swagger:model PipelineTaskDetailStatusMetadata +type PipelineTaskDetailStatusMetadata struct { + + // Custom status metadata, this can be used to provide + // additional status info for a given task during runtime + // This is currently not utilized by KFP backend. + CustomProperties map[string]interface{} `json:"custom_properties,omitempty"` + + // KFP Backend will populate this field with error messages + // if any are available on a Failed task. + Message string `json:"message,omitempty"` +} + +// Validate validates this pipeline task detail status metadata +func (m *PipelineTaskDetailStatusMetadata) Validate(formats strfmt.Registry) error { + return nil +} + +// ContextValidate validates this pipeline task detail status metadata based on context it is used +func (m *PipelineTaskDetailStatusMetadata) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} + +// MarshalBinary interface implementation +func (m *PipelineTaskDetailStatusMetadata) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *PipelineTaskDetailStatusMetadata) UnmarshalBinary(b []byte) error { + var res PipelineTaskDetailStatusMetadata + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/v2beta1/go_http_client/run_model/pipeline_task_detail_task_pod.go b/backend/api/v2beta1/go_http_client/run_model/pipeline_task_detail_task_pod.go new file mode 100644 index 00000000000..2ee5fa267f9 --- /dev/null +++ b/backend/api/v2beta1/go_http_client/run_model/pipeline_task_detail_task_pod.go @@ -0,0 +1,115 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package run_model + +// This file was generated by the swagger tool. 
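For illustration, a minimal round-trip through the status-metadata model added above; the message text and custom property are made up:

package main

import (
	"fmt"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model"
)

func main() {
	// Per the generated comments, the backend fills Message on failed tasks,
	// while CustomProperties is free-form and currently unused by the backend.
	meta := &run_model.PipelineTaskDetailStatusMetadata{
		Message:          "container exited with code 137",
		CustomProperties: map[string]interface{}{"retries": 2},
	}

	b, err := meta.MarshalBinary() // JSON via swag.WriteJSON
	if err != nil {
		panic(err)
	}

	var back run_model.PipelineTaskDetailStatusMetadata
	if err := back.UnmarshalBinary(b); err != nil {
		panic(err)
	}
	fmt.Println(back.Message)
}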
+// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + + "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" +) + +// PipelineTaskDetailTaskPod pipeline task detail task pod +// +// swagger:model PipelineTaskDetailTaskPod +type PipelineTaskDetailTaskPod struct { + + // name + Name string `json:"name,omitempty"` + + // type + Type *PipelineTaskDetailTaskPodType `json:"type,omitempty"` + + // uid + UID string `json:"uid,omitempty"` +} + +// Validate validates this pipeline task detail task pod +func (m *PipelineTaskDetailTaskPod) Validate(formats strfmt.Registry) error { + var res []error + + if err := m.validateType(formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *PipelineTaskDetailTaskPod) validateType(formats strfmt.Registry) error { + if swag.IsZero(m.Type) { // not required + return nil + } + + if m.Type != nil { + if err := m.Type.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("type") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("type") + } + return err + } + } + + return nil +} + +// ContextValidate validate this pipeline task detail task pod based on the context it is used +func (m *PipelineTaskDetailTaskPod) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateType(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *PipelineTaskDetailTaskPod) contextValidateType(ctx context.Context, formats strfmt.Registry) error { + + if m.Type != nil { + + if swag.IsZero(m.Type) { // not required + return nil + } + + if err := m.Type.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("type") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("type") + } + return err + } + } + + return nil +} + +// MarshalBinary interface implementation +func (m *PipelineTaskDetailTaskPod) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *PipelineTaskDetailTaskPod) UnmarshalBinary(b []byte) error { + var res PipelineTaskDetailTaskPod + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/v2beta1/go_http_client/run_model/pipeline_task_detail_task_pod_type.go b/backend/api/v2beta1/go_http_client/run_model/pipeline_task_detail_task_pod_type.go new file mode 100644 index 00000000000..587a097fdc3 --- /dev/null +++ b/backend/api/v2beta1/go_http_client/run_model/pipeline_task_detail_task_pod_type.go @@ -0,0 +1,81 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package run_model + +// This file was generated by the swagger tool. 
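A sketch of constructing the task-pod model defined above, using the pod-type enum that follows; the pod name and UID here are made up:

package main

import (
	"fmt"

	"github.com/go-openapi/strfmt"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model"
)

func main() {
	// Type is a pointer to the generated enum; NewPipelineTaskDetailTaskPodType
	// is the freshly-allocated-pointer helper emitted by go-swagger.
	pod := &run_model.PipelineTaskDetailTaskPod{
		Name: "train-step-driver", // hypothetical pod name
		UID:  "c0ffee00-0000-0000-0000-000000000000",
		Type: run_model.NewPipelineTaskDetailTaskPodType(run_model.PipelineTaskDetailTaskPodTypeDRIVER),
	}
	// Validate recurses into the enum and rejects values outside the schema list.
	fmt.Println(pod.Validate(strfmt.Default)) // <nil>
}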
+// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "encoding/json" + + "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/validate" +) + +// PipelineTaskDetailTaskPodType pipeline task detail task pod type +// +// swagger:model PipelineTaskDetailTaskPodType +type PipelineTaskDetailTaskPodType string + +func NewPipelineTaskDetailTaskPodType(value PipelineTaskDetailTaskPodType) *PipelineTaskDetailTaskPodType { + return &value +} + +// Pointer returns a pointer to a freshly-allocated PipelineTaskDetailTaskPodType. +func (m PipelineTaskDetailTaskPodType) Pointer() *PipelineTaskDetailTaskPodType { + return &m +} + +const ( + + // PipelineTaskDetailTaskPodTypeUNSPECIFIED captures enum value "UNSPECIFIED" + PipelineTaskDetailTaskPodTypeUNSPECIFIED PipelineTaskDetailTaskPodType = "UNSPECIFIED" + + // PipelineTaskDetailTaskPodTypeDRIVER captures enum value "DRIVER" + PipelineTaskDetailTaskPodTypeDRIVER PipelineTaskDetailTaskPodType = "DRIVER" + + // PipelineTaskDetailTaskPodTypeEXECUTOR captures enum value "EXECUTOR" + PipelineTaskDetailTaskPodTypeEXECUTOR PipelineTaskDetailTaskPodType = "EXECUTOR" +) + +// for schema +var pipelineTaskDetailTaskPodTypeEnum []interface{} + +func init() { + var res []PipelineTaskDetailTaskPodType + if err := json.Unmarshal([]byte(`["UNSPECIFIED","DRIVER","EXECUTOR"]`), &res); err != nil { + panic(err) + } + for _, v := range res { + pipelineTaskDetailTaskPodTypeEnum = append(pipelineTaskDetailTaskPodTypeEnum, v) + } +} + +func (m PipelineTaskDetailTaskPodType) validatePipelineTaskDetailTaskPodTypeEnum(path, location string, value PipelineTaskDetailTaskPodType) error { + if err := validate.EnumCase(path, location, value, pipelineTaskDetailTaskPodTypeEnum, true); err != nil { + return err + } + return nil +} + +// Validate validates this pipeline task detail task pod type +func (m PipelineTaskDetailTaskPodType) Validate(formats strfmt.Registry) error { + var res []error + + // value enum + if err := m.validatePipelineTaskDetailTaskPodTypeEnum("", "body", m); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +// ContextValidate validates this pipeline task detail task pod type based on context it is used +func (m PipelineTaskDetailTaskPodType) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} diff --git a/backend/api/v2beta1/go_http_client/run_model/pipeline_task_detail_task_state.go b/backend/api/v2beta1/go_http_client/run_model/pipeline_task_detail_task_state.go new file mode 100644 index 00000000000..d8d0b996ca6 --- /dev/null +++ b/backend/api/v2beta1/go_http_client/run_model/pipeline_task_detail_task_state.go @@ -0,0 +1,96 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package run_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "encoding/json" + + "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/validate" +) + +// PipelineTaskDetailTaskState Runtime state of a Task +// +// - RUNTIME_STATE_UNSPECIFIED: Default value. This value is not used. +// - RUNNING: Entity execution is in progress. +// - SUCCEEDED: Entity completed successfully. +// - SKIPPED: Entity has been skipped. For example, due to caching. +// - FAILED: Entity execution has failed. 
+// +// swagger:model PipelineTaskDetailTaskState +type PipelineTaskDetailTaskState string + +func NewPipelineTaskDetailTaskState(value PipelineTaskDetailTaskState) *PipelineTaskDetailTaskState { + return &value +} + +// Pointer returns a pointer to a freshly-allocated PipelineTaskDetailTaskState. +func (m PipelineTaskDetailTaskState) Pointer() *PipelineTaskDetailTaskState { + return &m +} + +const ( + + // PipelineTaskDetailTaskStateRUNTIMESTATEUNSPECIFIED captures enum value "RUNTIME_STATE_UNSPECIFIED" + PipelineTaskDetailTaskStateRUNTIMESTATEUNSPECIFIED PipelineTaskDetailTaskState = "RUNTIME_STATE_UNSPECIFIED" + + // PipelineTaskDetailTaskStateRUNNING captures enum value "RUNNING" + PipelineTaskDetailTaskStateRUNNING PipelineTaskDetailTaskState = "RUNNING" + + // PipelineTaskDetailTaskStateSUCCEEDED captures enum value "SUCCEEDED" + PipelineTaskDetailTaskStateSUCCEEDED PipelineTaskDetailTaskState = "SUCCEEDED" + + // PipelineTaskDetailTaskStateSKIPPED captures enum value "SKIPPED" + PipelineTaskDetailTaskStateSKIPPED PipelineTaskDetailTaskState = "SKIPPED" + + // PipelineTaskDetailTaskStateFAILED captures enum value "FAILED" + PipelineTaskDetailTaskStateFAILED PipelineTaskDetailTaskState = "FAILED" + + // PipelineTaskDetailTaskStateCACHED captures enum value "CACHED" + PipelineTaskDetailTaskStateCACHED PipelineTaskDetailTaskState = "CACHED" +) + +// for schema +var pipelineTaskDetailTaskStateEnum []interface{} + +func init() { + var res []PipelineTaskDetailTaskState + if err := json.Unmarshal([]byte(`["RUNTIME_STATE_UNSPECIFIED","RUNNING","SUCCEEDED","SKIPPED","FAILED","CACHED"]`), &res); err != nil { + panic(err) + } + for _, v := range res { + pipelineTaskDetailTaskStateEnum = append(pipelineTaskDetailTaskStateEnum, v) + } +} + +func (m PipelineTaskDetailTaskState) validatePipelineTaskDetailTaskStateEnum(path, location string, value PipelineTaskDetailTaskState) error { + if err := validate.EnumCase(path, location, value, pipelineTaskDetailTaskStateEnum, true); err != nil { + return err + } + return nil +} + +// Validate validates this pipeline task detail task state +func (m PipelineTaskDetailTaskState) Validate(formats strfmt.Registry) error { + var res []error + + // value enum + if err := m.validatePipelineTaskDetailTaskStateEnum("", "body", m); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +// ContextValidate validates this pipeline task detail task state based on context it is used +func (m PipelineTaskDetailTaskState) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} diff --git a/backend/api/v2beta1/go_http_client/run_model/pipeline_task_detail_task_status.go b/backend/api/v2beta1/go_http_client/run_model/pipeline_task_detail_task_status.go new file mode 100644 index 00000000000..a103e22493d --- /dev/null +++ b/backend/api/v2beta1/go_http_client/run_model/pipeline_task_detail_task_status.go @@ -0,0 +1,181 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package run_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + + "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" + "github.com/go-openapi/validate" +) + +// PipelineTaskDetailTaskStatus Timestamped representation of a Task state with an optional error. 
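One way a client might consume the state enum; treating SUCCEEDED, SKIPPED, FAILED, and CACHED as terminal is an assumption of this sketch, not something the API spec states:

package main

import (
	"fmt"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model"
)

// isTerminal reports whether a task has reached a state it will not leave.
func isTerminal(s run_model.PipelineTaskDetailTaskState) bool {
	switch s {
	case run_model.PipelineTaskDetailTaskStateSUCCEEDED,
		run_model.PipelineTaskDetailTaskStateSKIPPED,
		run_model.PipelineTaskDetailTaskStateFAILED,
		run_model.PipelineTaskDetailTaskStateCACHED:
		return true
	default:
		return false
	}
}

func main() {
	fmt.Println(isTerminal(run_model.PipelineTaskDetailTaskStateRUNNING)) // false
}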
+// +// swagger:model PipelineTaskDetailTaskStatus +type PipelineTaskDetailTaskStatus struct { + + // error + Error *GooglerpcStatus `json:"error,omitempty"` + + // state + State *PipelineTaskDetailTaskState `json:"state,omitempty"` + + // update time + // Format: date-time + UpdateTime strfmt.DateTime `json:"update_time,omitempty"` +} + +// Validate validates this pipeline task detail task status +func (m *PipelineTaskDetailTaskStatus) Validate(formats strfmt.Registry) error { + var res []error + + if err := m.validateError(formats); err != nil { + res = append(res, err) + } + + if err := m.validateState(formats); err != nil { + res = append(res, err) + } + + if err := m.validateUpdateTime(formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *PipelineTaskDetailTaskStatus) validateError(formats strfmt.Registry) error { + if swag.IsZero(m.Error) { // not required + return nil + } + + if m.Error != nil { + if err := m.Error.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("error") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("error") + } + return err + } + } + + return nil +} + +func (m *PipelineTaskDetailTaskStatus) validateState(formats strfmt.Registry) error { + if swag.IsZero(m.State) { // not required + return nil + } + + if m.State != nil { + if err := m.State.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("state") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("state") + } + return err + } + } + + return nil +} + +func (m *PipelineTaskDetailTaskStatus) validateUpdateTime(formats strfmt.Registry) error { + if swag.IsZero(m.UpdateTime) { // not required + return nil + } + + if err := validate.FormatOf("update_time", "body", "date-time", m.UpdateTime.String(), formats); err != nil { + return err + } + + return nil +} + +// ContextValidate validate this pipeline task detail task status based on the context it is used +func (m *PipelineTaskDetailTaskStatus) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateError(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateState(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} + +func (m *PipelineTaskDetailTaskStatus) contextValidateError(ctx context.Context, formats strfmt.Registry) error { + + if m.Error != nil { + + if swag.IsZero(m.Error) { // not required + return nil + } + + if err := m.Error.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("error") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("error") + } + return err + } + } + + return nil +} + +func (m *PipelineTaskDetailTaskStatus) contextValidateState(ctx context.Context, formats strfmt.Registry) error { + + if m.State != nil { + + if swag.IsZero(m.State) { // not required + return nil + } + + if err := m.State.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("state") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("state") + } + return err + } + } + + return nil +} + +// MarshalBinary interface implementation +func (m *PipelineTaskDetailTaskStatus) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *PipelineTaskDetailTaskStatus) UnmarshalBinary(b []byte) error { + var res PipelineTaskDetailTaskStatus + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/v2beta1/go_http_client/run_model/pipeline_task_detail_task_type.go b/backend/api/v2beta1/go_http_client/run_model/pipeline_task_detail_task_type.go new file mode 100644 index 00000000000..3cda04038bd --- /dev/null +++ b/backend/api/v2beta1/go_http_client/run_model/pipeline_task_detail_task_type.go @@ -0,0 +1,112 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package run_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "encoding/json" + + "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/validate" +) + +// PipelineTaskDetailTaskType - ROOT: Root task is the top ancestor task to all tasks in the pipeline run +// It is the only task with no parent task in a Pipeline Run. +// - RUNTIME: All child tasks in the Run DAG are Runtime tasks. With the exception +// +// of K8S driver pods. +// These tasks are the only tasks that have Executor Pods. +// - CONDITION_BRANCH: Condition Branch is the wrapper task of an If block +// - CONDITION: Condition is an individual "if" branch, and is +// +// a child to a CONDITION_BRANCH task. +// - LOOP: Task Group for CONDITION_BRANCH +// +// Task Group for RUNTIME Loop Iterations +// - DAG: Generic DAG task type for types like Nested Pipelines +// +// where there is no declarative way to detect this within +// a driver. +// +// swagger:model PipelineTaskDetailTaskType +type PipelineTaskDetailTaskType string + +func NewPipelineTaskDetailTaskType(value PipelineTaskDetailTaskType) *PipelineTaskDetailTaskType { + return &value +} + +// Pointer returns a pointer to a freshly-allocated PipelineTaskDetailTaskType. 
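Following the task-type doc comment above (only RUNTIME tasks carry executor pods), a client-side filter might look like the sketch below; the helper name is ours:

package main

import (
	"fmt"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model"
)

// hasExecutorPods reports whether a task of the given type is expected to
// have executor pods, per the RUNTIME doc comment in the generated model.
func hasExecutorPods(t *run_model.PipelineTaskDetailTaskType) bool {
	return t != nil && *t == run_model.PipelineTaskDetailTaskTypeRUNTIME
}

func main() {
	t := run_model.NewPipelineTaskDetailTaskType(run_model.PipelineTaskDetailTaskTypeDAG)
	fmt.Println(hasExecutorPods(t)) // false
}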
+func (m PipelineTaskDetailTaskType) Pointer() *PipelineTaskDetailTaskType { + return &m +} + +const ( + + // PipelineTaskDetailTaskTypeROOT captures enum value "ROOT" + PipelineTaskDetailTaskTypeROOT PipelineTaskDetailTaskType = "ROOT" + + // PipelineTaskDetailTaskTypeRUNTIME captures enum value "RUNTIME" + PipelineTaskDetailTaskTypeRUNTIME PipelineTaskDetailTaskType = "RUNTIME" + + // PipelineTaskDetailTaskTypeCONDITIONBRANCH captures enum value "CONDITION_BRANCH" + PipelineTaskDetailTaskTypeCONDITIONBRANCH PipelineTaskDetailTaskType = "CONDITION_BRANCH" + + // PipelineTaskDetailTaskTypeCONDITION captures enum value "CONDITION" + PipelineTaskDetailTaskTypeCONDITION PipelineTaskDetailTaskType = "CONDITION" + + // PipelineTaskDetailTaskTypeLOOP captures enum value "LOOP" + PipelineTaskDetailTaskTypeLOOP PipelineTaskDetailTaskType = "LOOP" + + // PipelineTaskDetailTaskTypeEXITHANDLER captures enum value "EXIT_HANDLER" + PipelineTaskDetailTaskTypeEXITHANDLER PipelineTaskDetailTaskType = "EXIT_HANDLER" + + // PipelineTaskDetailTaskTypeIMPORTER captures enum value "IMPORTER" + PipelineTaskDetailTaskTypeIMPORTER PipelineTaskDetailTaskType = "IMPORTER" + + // PipelineTaskDetailTaskTypeDAG captures enum value "DAG" + PipelineTaskDetailTaskTypeDAG PipelineTaskDetailTaskType = "DAG" +) + +// for schema +var pipelineTaskDetailTaskTypeEnum []interface{} + +func init() { + var res []PipelineTaskDetailTaskType + if err := json.Unmarshal([]byte(`["ROOT","RUNTIME","CONDITION_BRANCH","CONDITION","LOOP","EXIT_HANDLER","IMPORTER","DAG"]`), &res); err != nil { + panic(err) + } + for _, v := range res { + pipelineTaskDetailTaskTypeEnum = append(pipelineTaskDetailTaskTypeEnum, v) + } +} + +func (m PipelineTaskDetailTaskType) validatePipelineTaskDetailTaskTypeEnum(path, location string, value PipelineTaskDetailTaskType) error { + if err := validate.EnumCase(path, location, value, pipelineTaskDetailTaskTypeEnum, true); err != nil { + return err + } + return nil +} + +// Validate validates this pipeline task detail task type +func (m PipelineTaskDetailTaskType) Validate(formats strfmt.Registry) error { + var res []error + + // value enum + if err := m.validatePipelineTaskDetailTaskTypeEnum("", "body", m); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +// ContextValidate validates this pipeline task detail task type based on context it is used +func (m PipelineTaskDetailTaskType) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} diff --git a/backend/api/v2beta1/go_http_client/run_model/pipeline_task_detail_type_attributes.go b/backend/api/v2beta1/go_http_client/run_model/pipeline_task_detail_type_attributes.go new file mode 100644 index 00000000000..8ddd5456307 --- /dev/null +++ b/backend/api/v2beta1/go_http_client/run_model/pipeline_task_detail_type_attributes.go @@ -0,0 +1,53 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package run_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" +) + +// PipelineTaskDetailTypeAttributes pipeline task detail type attributes +// +// swagger:model PipelineTaskDetailTypeAttributes +type PipelineTaskDetailTypeAttributes struct { + + // Optional. Applies to type LOOP + IterationCount string `json:"iteration_count,omitempty"` + + // Optional. 
Applies to type Runtime that is an iteration + IterationIndex string `json:"iteration_index,omitempty"` +} + +// Validate validates this pipeline task detail type attributes +func (m *PipelineTaskDetailTypeAttributes) Validate(formats strfmt.Registry) error { + return nil +} + +// ContextValidate validates this pipeline task detail type attributes based on context it is used +func (m *PipelineTaskDetailTypeAttributes) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} + +// MarshalBinary interface implementation +func (m *PipelineTaskDetailTypeAttributes) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *PipelineTaskDetailTypeAttributes) UnmarshalBinary(b []byte) error { + var res PipelineTaskDetailTypeAttributes + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/v2beta1/go_http_client/run_model/v2beta1_artifact.go b/backend/api/v2beta1/go_http_client/run_model/v2beta1_artifact.go new file mode 100644 index 00000000000..fbd47462aa5 --- /dev/null +++ b/backend/api/v2beta1/go_http_client/run_model/v2beta1_artifact.go @@ -0,0 +1,186 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package run_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + + "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" + "github.com/go-openapi/validate" +) + +// V2beta1Artifact Not to be confused with RuntimeArtifact in PipelineSpec +// +// swagger:model v2beta1Artifact +type V2beta1Artifact struct { + + // Output only. The unique server generated id of the artifact. + // Note: Updated id name to be consistent with other api naming patterns (with prefix) + // Read Only: true + ArtifactID string `json:"artifact_id,omitempty"` + + // Output only. Create time of the artifact in millisecond since epoch. + // Note: The type and name is updated from mlmd artifact to be consistent with other backend apis. + // Read Only: true + // Format: date-time + CreatedAt strfmt.DateTime `json:"created_at,omitempty"` + + // description + Description string `json:"description,omitempty"` + + // Optional. User provided custom properties which are not defined by its type. + Metadata map[string]interface{} `json:"metadata,omitempty"` + + // Required. The client provided name of the artifact. + // Note: in MLMD when name was set, it had to be unique for that type_id + // this restriction is removed here + // If this is a "Metric" artifact, the name of the metric + // is treated as the Key in its K/V pair. + Name string `json:"name,omitempty"` + + // namespace + Namespace string `json:"namespace,omitempty"` + + // Used primarily for metrics + NumberValue float64 `json:"number_value,omitempty"` + + // Required. The name of an ArtifactType. E.g. Dataset + Type *ArtifactArtifactType `json:"type,omitempty"` + + // The uniform resource identifier of the physical artifact. + // May be empty if there is no physical artifact. 
+ URI string `json:"uri,omitempty"` +} + +// Validate validates this v2beta1 artifact +func (m *V2beta1Artifact) Validate(formats strfmt.Registry) error { + var res []error + + if err := m.validateCreatedAt(formats); err != nil { + res = append(res, err) + } + + if err := m.validateType(formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *V2beta1Artifact) validateCreatedAt(formats strfmt.Registry) error { + if swag.IsZero(m.CreatedAt) { // not required + return nil + } + + if err := validate.FormatOf("created_at", "body", "date-time", m.CreatedAt.String(), formats); err != nil { + return err + } + + return nil +} + +func (m *V2beta1Artifact) validateType(formats strfmt.Registry) error { + if swag.IsZero(m.Type) { // not required + return nil + } + + if m.Type != nil { + if err := m.Type.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("type") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("type") + } + return err + } + } + + return nil +} + +// ContextValidate validate this v2beta1 artifact based on the context it is used +func (m *V2beta1Artifact) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateArtifactID(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateCreatedAt(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateType(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *V2beta1Artifact) contextValidateArtifactID(ctx context.Context, formats strfmt.Registry) error { + + if err := validate.ReadOnly(ctx, "artifact_id", "body", string(m.ArtifactID)); err != nil { + return err + } + + return nil +} + +func (m *V2beta1Artifact) contextValidateCreatedAt(ctx context.Context, formats strfmt.Registry) error { + + if err := validate.ReadOnly(ctx, "created_at", "body", strfmt.DateTime(m.CreatedAt)); err != nil { + return err + } + + return nil +} + +func (m *V2beta1Artifact) contextValidateType(ctx context.Context, formats strfmt.Registry) error { + + if m.Type != nil { + + if swag.IsZero(m.Type) { // not required + return nil + } + + if err := m.Type.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("type") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("type") + } + return err + } + } + + return nil +} + +// MarshalBinary interface implementation +func (m *V2beta1Artifact) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *V2beta1Artifact) UnmarshalBinary(b []byte) error { + var res V2beta1Artifact + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/v2beta1/go_http_client/run_model/v2beta1_artifact_list.go b/backend/api/v2beta1/go_http_client/run_model/v2beta1_artifact_list.go deleted file mode 100644 index 3d833ad3e22..00000000000 --- a/backend/api/v2beta1/go_http_client/run_model/v2beta1_artifact_list.go +++ /dev/null @@ -1,50 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package run_model - -// This file was generated by the swagger tool. 
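A sketch of the "Metric" convention described in the comments above, where the artifact name acts as the metric key and NumberValue as its value; artifact_id and created_at are output-only and stay unset:

package main

import (
	"fmt"

	"github.com/go-openapi/strfmt"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model"
)

func main() {
	metric := &run_model.V2beta1Artifact{
		Name:        "accuracy", // metric key, per the Name doc comment
		Type:        run_model.NewArtifactArtifactType(run_model.ArtifactArtifactTypeMetric),
		NumberValue: 0.93,
		// URI left empty: metrics typically have no physical artifact.
	}
	fmt.Println(metric.Validate(strfmt.Default)) // <nil>
}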
-// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "context" - - "github.com/go-openapi/strfmt" - "github.com/go-openapi/swag" -) - -// V2beta1ArtifactList A list of artifact metadata. -// -// swagger:model v2beta1ArtifactList -type V2beta1ArtifactList struct { - - // A list of artifact metadata ids. - ArtifactIds []string `json:"artifact_ids"` -} - -// Validate validates this v2beta1 artifact list -func (m *V2beta1ArtifactList) Validate(formats strfmt.Registry) error { - return nil -} - -// ContextValidate validates this v2beta1 artifact list based on context it is used -func (m *V2beta1ArtifactList) ContextValidate(ctx context.Context, formats strfmt.Registry) error { - return nil -} - -// MarshalBinary interface implementation -func (m *V2beta1ArtifactList) MarshalBinary() ([]byte, error) { - if m == nil { - return nil, nil - } - return swag.WriteJSON(m) -} - -// UnmarshalBinary interface implementation -func (m *V2beta1ArtifactList) UnmarshalBinary(b []byte) error { - var res V2beta1ArtifactList - if err := swag.ReadJSON(b, &res); err != nil { - return err - } - *m = res - return nil -} diff --git a/backend/api/v2beta1/go_http_client/run_model/v2beta1_get_run_request_view_mode.go b/backend/api/v2beta1/go_http_client/run_model/v2beta1_get_run_request_view_mode.go new file mode 100644 index 00000000000..dbe7c6aa30c --- /dev/null +++ b/backend/api/v2beta1/go_http_client/run_model/v2beta1_get_run_request_view_mode.go @@ -0,0 +1,82 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package run_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "encoding/json" + + "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/validate" +) + +// V2beta1GetRunRequestViewMode - DEFAULT: By default `tasks` field is omitted. +// This provides a faster and leaner run object. +// - FULL: This view mode displays all the tasks for this run +// +// with all its fields populated. +// +// swagger:model v2beta1GetRunRequestViewMode +type V2beta1GetRunRequestViewMode string + +func NewV2beta1GetRunRequestViewMode(value V2beta1GetRunRequestViewMode) *V2beta1GetRunRequestViewMode { + return &value +} + +// Pointer returns a pointer to a freshly-allocated V2beta1GetRunRequestViewMode. 
+func (m V2beta1GetRunRequestViewMode) Pointer() *V2beta1GetRunRequestViewMode { + return &m +} + +const ( + + // V2beta1GetRunRequestViewModeDEFAULT captures enum value "DEFAULT" + V2beta1GetRunRequestViewModeDEFAULT V2beta1GetRunRequestViewMode = "DEFAULT" + + // V2beta1GetRunRequestViewModeFULL captures enum value "FULL" + V2beta1GetRunRequestViewModeFULL V2beta1GetRunRequestViewMode = "FULL" +) + +// for schema +var v2beta1GetRunRequestViewModeEnum []interface{} + +func init() { + var res []V2beta1GetRunRequestViewMode + if err := json.Unmarshal([]byte(`["DEFAULT","FULL"]`), &res); err != nil { + panic(err) + } + for _, v := range res { + v2beta1GetRunRequestViewModeEnum = append(v2beta1GetRunRequestViewModeEnum, v) + } +} + +func (m V2beta1GetRunRequestViewMode) validateV2beta1GetRunRequestViewModeEnum(path, location string, value V2beta1GetRunRequestViewMode) error { + if err := validate.EnumCase(path, location, value, v2beta1GetRunRequestViewModeEnum, true); err != nil { + return err + } + return nil +} + +// Validate validates this v2beta1 get run request view mode +func (m V2beta1GetRunRequestViewMode) Validate(formats strfmt.Registry) error { + var res []error + + // value enum + if err := m.validateV2beta1GetRunRequestViewModeEnum("", "body", m); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +// ContextValidate validates this v2beta1 get run request view mode based on context it is used +func (m V2beta1GetRunRequestViewMode) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} diff --git a/backend/api/v2beta1/go_http_client/run_model/v2beta1_i_o_producer.go b/backend/api/v2beta1/go_http_client/run_model/v2beta1_i_o_producer.go new file mode 100644 index 00000000000..249a39bf51d --- /dev/null +++ b/backend/api/v2beta1/go_http_client/run_model/v2beta1_i_o_producer.go @@ -0,0 +1,54 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package run_model + +// This file was generated by the swagger tool. 
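The view-mode enum defined above only selects payload shape (DEFAULT omits tasks, FULL populates them); how it is attached to a GetRun request is outside this hunk, so only the enum handling is sketched:

package main

import (
	"fmt"

	"github.com/go-openapi/strfmt"

	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model"
)

func main() {
	// FULL asks the server to populate the run's tasks with all fields.
	mode := run_model.NewV2beta1GetRunRequestViewMode(run_model.V2beta1GetRunRequestViewModeFULL)
	fmt.Println(mode.Validate(strfmt.Default)) // <nil>
}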
+// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" +) + +// V2beta1IOProducer v2beta1 i o producer +// +// swagger:model v2beta1IOProducer +type V2beta1IOProducer struct { + + // When a source is from an iteration Runtime + // task type inside a ParallelFor + Iteration string `json:"iteration,omitempty"` + + // task name + TaskName string `json:"task_name,omitempty"` +} + +// Validate validates this v2beta1 i o producer +func (m *V2beta1IOProducer) Validate(formats strfmt.Registry) error { + return nil +} + +// ContextValidate validates this v2beta1 i o producer based on context it is used +func (m *V2beta1IOProducer) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} + +// MarshalBinary interface implementation +func (m *V2beta1IOProducer) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *V2beta1IOProducer) UnmarshalBinary(b []byte) error { + var res V2beta1IOProducer + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/v2beta1/go_http_client/run_model/v2beta1_i_o_type.go b/backend/api/v2beta1/go_http_client/run_model/v2beta1_i_o_type.go new file mode 100644 index 00000000000..8de543fc507 --- /dev/null +++ b/backend/api/v2beta1/go_http_client/run_model/v2beta1_i_o_type.go @@ -0,0 +1,151 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package run_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "encoding/json" + + "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/validate" +) + +// V2beta1IOType Describes the I/O relationship between +// Artifacts/Parameters and Tasks. +// There are a couple of instances where +// input/outputs have special types such +// as in the case of LoopArguments or +// dsl.Collected outputs. +// +// - UNSPECIFIED: For validation +// - COMPONENT_DEFAULT_INPUT: This is used for inputs that are +// +// provided via default parameters in +// the component input definitions +// - TASK_OUTPUT_INPUT: This is used for inputs that are +// +// provided via upstream tasks. +// In the sdk this appears as: +// TaskInputsSpec.kind.task_output_parameter +// & TaskInputsSpec.kind.task_output_artifact +// - COMPONENT_INPUT: Used for inputs that are +// +// passed from parent tasks. +// - RUNTIME_VALUE_INPUT: Hardcoded values passed +// +// as arguments to the task. +// - COLLECTED_INPUTS: Used for dsl.Collected +// +// Usage of this type indicates that all +// Artifacts within the IOArtifact.artifacts +// are inputs collected from sub tasks with +// ITERATOR_OUTPUT outputs. +// - ITERATOR_INPUT: In a for loop task, introduced via ParallelFor, this type +// +// is used to indicate whether this resolved input belongs +// to a parameterIterator or artifactIterator. +// In such a case the "artifacts" field for IOArtifact.artifacts +// is the list of resolved items for this parallelFor. +// - ITERATOR_INPUT_RAW: Hardcoded iterator parameters. +// +// Raw Iterator inputs have no producer +// - ITERATOR_OUTPUT: When an output is produced by a Runtime Iteration Task +// +// This value is use to differentiate between standard inputs +// - OUTPUT: All other output types fall under this type. 
+// - ONE_OF_OUTPUT: An output of a Conditions branch. +// +// swagger:model v2beta1IOType +type V2beta1IOType string + +func NewV2beta1IOType(value V2beta1IOType) *V2beta1IOType { + return &value +} + +// Pointer returns a pointer to a freshly-allocated V2beta1IOType. +func (m V2beta1IOType) Pointer() *V2beta1IOType { + return &m +} + +const ( + + // V2beta1IOTypeUNSPECIFIED captures enum value "UNSPECIFIED" + V2beta1IOTypeUNSPECIFIED V2beta1IOType = "UNSPECIFIED" + + // V2beta1IOTypeCOMPONENTDEFAULTINPUT captures enum value "COMPONENT_DEFAULT_INPUT" + V2beta1IOTypeCOMPONENTDEFAULTINPUT V2beta1IOType = "COMPONENT_DEFAULT_INPUT" + + // V2beta1IOTypeTASKOUTPUTINPUT captures enum value "TASK_OUTPUT_INPUT" + V2beta1IOTypeTASKOUTPUTINPUT V2beta1IOType = "TASK_OUTPUT_INPUT" + + // V2beta1IOTypeCOMPONENTINPUT captures enum value "COMPONENT_INPUT" + V2beta1IOTypeCOMPONENTINPUT V2beta1IOType = "COMPONENT_INPUT" + + // V2beta1IOTypeRUNTIMEVALUEINPUT captures enum value "RUNTIME_VALUE_INPUT" + V2beta1IOTypeRUNTIMEVALUEINPUT V2beta1IOType = "RUNTIME_VALUE_INPUT" + + // V2beta1IOTypeCOLLECTEDINPUTS captures enum value "COLLECTED_INPUTS" + V2beta1IOTypeCOLLECTEDINPUTS V2beta1IOType = "COLLECTED_INPUTS" + + // V2beta1IOTypeITERATORINPUT captures enum value "ITERATOR_INPUT" + V2beta1IOTypeITERATORINPUT V2beta1IOType = "ITERATOR_INPUT" + + // V2beta1IOTypeITERATORINPUTRAW captures enum value "ITERATOR_INPUT_RAW" + V2beta1IOTypeITERATORINPUTRAW V2beta1IOType = "ITERATOR_INPUT_RAW" + + // V2beta1IOTypeITERATOROUTPUT captures enum value "ITERATOR_OUTPUT" + V2beta1IOTypeITERATOROUTPUT V2beta1IOType = "ITERATOR_OUTPUT" + + // V2beta1IOTypeOUTPUT captures enum value "OUTPUT" + V2beta1IOTypeOUTPUT V2beta1IOType = "OUTPUT" + + // V2beta1IOTypeONEOFOUTPUT captures enum value "ONE_OF_OUTPUT" + V2beta1IOTypeONEOFOUTPUT V2beta1IOType = "ONE_OF_OUTPUT" + + // V2beta1IOTypeTASKFINALSTATUSOUTPUT captures enum value "TASK_FINAL_STATUS_OUTPUT" + V2beta1IOTypeTASKFINALSTATUSOUTPUT V2beta1IOType = "TASK_FINAL_STATUS_OUTPUT" +) + +// for schema +var v2beta1IOTypeEnum []interface{} + +func init() { + var res []V2beta1IOType + if err := json.Unmarshal([]byte(`["UNSPECIFIED","COMPONENT_DEFAULT_INPUT","TASK_OUTPUT_INPUT","COMPONENT_INPUT","RUNTIME_VALUE_INPUT","COLLECTED_INPUTS","ITERATOR_INPUT","ITERATOR_INPUT_RAW","ITERATOR_OUTPUT","OUTPUT","ONE_OF_OUTPUT","TASK_FINAL_STATUS_OUTPUT"]`), &res); err != nil { + panic(err) + } + for _, v := range res { + v2beta1IOTypeEnum = append(v2beta1IOTypeEnum, v) + } +} + +func (m V2beta1IOType) validateV2beta1IOTypeEnum(path, location string, value V2beta1IOType) error { + if err := validate.EnumCase(path, location, value, v2beta1IOTypeEnum, true); err != nil { + return err + } + return nil +} + +// Validate validates this v2beta1 i o type +func (m V2beta1IOType) Validate(formats strfmt.Registry) error { + var res []error + + // value enum + if err := m.validateV2beta1IOTypeEnum("", "body", m); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} + +// ContextValidate validates this v2beta1 i o type based on context it is used +func (m V2beta1IOType) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} diff --git a/backend/api/v2beta1/go_http_client/run_model/v2beta1_list_runs_request_view_mode.go b/backend/api/v2beta1/go_http_client/run_model/v2beta1_list_runs_request_view_mode.go new file mode 100644 index 00000000000..1920720a958 --- /dev/null +++ b/backend/api/v2beta1/go_http_client/run_model/v2beta1_list_runs_request_view_mode.go @@ -0,0 +1,82 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package run_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "encoding/json" + + "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/validate" +) + +// V2beta1ListRunsRequestViewMode - DEFAULT: By default, the `tasks` field is omitted. +// This provides a faster and leaner run object. +// - FULL: This view mode displays all the tasks for this run +// +// with all their fields populated. +// +// swagger:model v2beta1ListRunsRequestViewMode +type V2beta1ListRunsRequestViewMode string + +func NewV2beta1ListRunsRequestViewMode(value V2beta1ListRunsRequestViewMode) *V2beta1ListRunsRequestViewMode { + return &value +} + +// Pointer returns a pointer to a freshly-allocated V2beta1ListRunsRequestViewMode. +func (m V2beta1ListRunsRequestViewMode) Pointer() *V2beta1ListRunsRequestViewMode { + return &m +} + +const ( + + // V2beta1ListRunsRequestViewModeDEFAULT captures enum value "DEFAULT" + V2beta1ListRunsRequestViewModeDEFAULT V2beta1ListRunsRequestViewMode = "DEFAULT" + + // V2beta1ListRunsRequestViewModeFULL captures enum value "FULL" + V2beta1ListRunsRequestViewModeFULL V2beta1ListRunsRequestViewMode = "FULL" +) + +// for schema +var v2beta1ListRunsRequestViewModeEnum []interface{} + +func init() { + var res []V2beta1ListRunsRequestViewMode + if err := json.Unmarshal([]byte(`["DEFAULT","FULL"]`), &res); err != nil { + panic(err) + } + for _, v := range res { + v2beta1ListRunsRequestViewModeEnum = append(v2beta1ListRunsRequestViewModeEnum, v) + } +} + +func (m V2beta1ListRunsRequestViewMode) validateV2beta1ListRunsRequestViewModeEnum(path, location string, value V2beta1ListRunsRequestViewMode) error { + if err := validate.EnumCase(path, location, value, v2beta1ListRunsRequestViewModeEnum, true); err != nil { + return err + } + return nil +} + +// Validate validates this v2beta1 list runs request view mode +func (m V2beta1ListRunsRequestViewMode) Validate(formats strfmt.Registry) error { + var res []error + + // value enum + if err := m.validateV2beta1ListRunsRequestViewModeEnum("", "body", m); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +// ContextValidate validates this v2beta1 list runs request view mode based on context it is used +func (m V2beta1ListRunsRequestViewMode) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + return nil +} diff --git a/backend/api/v2beta1/go_http_client/run_model/v2beta1_list_tasks_response.go b/backend/api/v2beta1/go_http_client/run_model/v2beta1_list_tasks_response.go new file mode 100644 index 00000000000..040cb8ea326 --- /dev/null +++ b/backend/api/v2beta1/go_http_client/run_model/v2beta1_list_tasks_response.go @@ -0,0 +1,127 @@ +// Code generated by go-swagger; DO NOT EDIT.
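For orientation, here is a minimal usage sketch for the new run_model types above. It is illustrative only, not part of the generated diff; it assumes the kubeflow/pipelines Go module path and relies on the go-openapi packages and generated methods (`Validate`, `MarshalBinary`) shown in the files above:

```go
package main

import (
	"fmt"

	"github.com/go-openapi/strfmt"

	// Assumed import path for the generated models in this repository.
	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model"
)

func main() {
	// Enum types are typed strings; Validate checks membership in the
	// enum list populated by the generated init() blocks.
	ioType := run_model.V2beta1IOTypeITERATOROUTPUT
	if err := ioType.Validate(strfmt.Default); err != nil {
		fmt.Println("unexpected IOType:", err)
	}

	// The run view modes work the same way.
	view := run_model.V2beta1ListRunsRequestViewModeFULL
	_ = view.Validate(strfmt.Default)

	// An IOProducer ties a resolved input back to the producing task;
	// Iteration is only meaningful for tasks inside a ParallelFor.
	// Field values here are hypothetical.
	producer := &run_model.V2beta1IOProducer{
		TaskName:  "train-op",
		Iteration: "3",
	}
	payload, err := producer.MarshalBinary() // swag.WriteJSON under the hood
	if err != nil {
		panic(err)
	}
	fmt.Println(string(payload)) // {"iteration":"3","task_name":"train-op"}
}
```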
+ +package run_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "strconv" + + "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" +) + +// V2beta1ListTasksResponse v2beta1 list tasks response +// +// swagger:model v2beta1ListTasksResponse +type V2beta1ListTasksResponse struct { + + // next page token + NextPageToken string `json:"next_page_token,omitempty"` + + // tasks + Tasks []*V2beta1PipelineTaskDetail `json:"tasks"` + + // total size + TotalSize int32 `json:"total_size,omitempty"` +} + +// Validate validates this v2beta1 list tasks response +func (m *V2beta1ListTasksResponse) Validate(formats strfmt.Registry) error { + var res []error + + if err := m.validateTasks(formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *V2beta1ListTasksResponse) validateTasks(formats strfmt.Registry) error { + if swag.IsZero(m.Tasks) { // not required + return nil + } + + for i := 0; i < len(m.Tasks); i++ { + if swag.IsZero(m.Tasks[i]) { // not required + continue + } + + if m.Tasks[i] != nil { + if err := m.Tasks[i].Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("tasks" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("tasks" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this v2beta1 list tasks response based on the context it is used +func (m *V2beta1ListTasksResponse) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateTasks(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *V2beta1ListTasksResponse) contextValidateTasks(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.Tasks); i++ { + + if m.Tasks[i] != nil { + + if swag.IsZero(m.Tasks[i]) { // not required + return nil + } + + if err := m.Tasks[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("tasks" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("tasks" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// MarshalBinary interface implementation +func (m *V2beta1ListTasksResponse) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *V2beta1ListTasksResponse) UnmarshalBinary(b []byte) error { + var res V2beta1ListTasksResponse + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/v2beta1/go_http_client/run_model/v2beta1_pipeline_task_detail.go b/backend/api/v2beta1/go_http_client/run_model/v2beta1_pipeline_task_detail.go index 4cfde980764..f4234e172d6 100644 --- a/backend/api/v2beta1/go_http_client/run_model/v2beta1_pipeline_task_detail.go +++ b/backend/api/v2beta1/go_http_client/run_model/v2beta1_pipeline_task_detail.go @@ -20,7 +20,10 @@ import ( // swagger:model v2beta1PipelineTaskDetail type V2beta1PipelineTaskDetail struct { - // Sequence of dependen tasks. 
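A companion sketch (again illustrative, with an invented payload) for the `V2beta1ListTasksResponse` model defined above: `UnmarshalBinary` and `Validate` are the generated entry points, and `Validate` recurses into each `V2beta1PipelineTaskDetail`, reporting nested failures under names like `tasks.0`:

```go
package main

import (
	"fmt"
	"log"

	"github.com/go-openapi/strfmt"

	// Assumed import path for the generated models in this repository.
	"github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model"
)

func main() {
	// Hypothetical wire payload; keys follow the JSON tags in the model.
	raw := []byte(`{
		"tasks": [{"task_id": "task-123", "run_id": "run-456"}],
		"next_page_token": "",
		"total_size": 1
	}`)

	var resp run_model.V2beta1ListTasksResponse
	if err := resp.UnmarshalBinary(raw); err != nil { // swag.ReadJSON
		log.Fatal(err)
	}
	if err := resp.Validate(strfmt.Default); err != nil {
		log.Fatal(err) // nested errors surface as "tasks.<i>..."
	}

	fmt.Printf("total=%d\n", resp.TotalSize)
	for _, task := range resp.Tasks {
		fmt.Println(task.TaskID, task.RunID)
	}
}
```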
+ // cache fingerprint + CacheFingerprint string `json:"cache_fingerprint,omitempty"` + + // Sequence of dependent tasks. ChildTasks []*PipelineTaskDetailChildTask `json:"child_tasks"` // Creation time of a task. @@ -39,42 +42,54 @@ type V2beta1PipelineTaskDetail struct { // Only populated when the task is in FAILED or CANCELED state. Error *GooglerpcStatus `json:"error,omitempty"` - // Execution id of the corresponding entry in ML metadata store. - ExecutionID string `json:"execution_id,omitempty"` - - // Execution information of a task. - ExecutorDetail *V2beta1PipelineTaskExecutorDetail `json:"executor_detail,omitempty"` + // inputs + Inputs *PipelineTaskDetailInputOutputs `json:"inputs,omitempty"` - // Input artifacts of the task. - Inputs map[string]V2beta1ArtifactList `json:"inputs,omitempty"` + // name + Name string `json:"name,omitempty"` - // Output artifacts of the task. - Outputs map[string]V2beta1ArtifactList `json:"outputs,omitempty"` + // outputs + Outputs *PipelineTaskDetailInputOutputs `json:"outputs,omitempty"` // ID of the parent task if the task is within a component scope. // Empty if the task is at the root level. ParentTaskID string `json:"parent_task_id,omitempty"` - // Name of the corresponding pod assigned by the orchestration engine. - // Also known as node_id. - PodName string `json:"pod_name,omitempty"` + // pods + Pods []*PipelineTaskDetailTaskPod `json:"pods"` // ID of the parent run. RunID string `json:"run_id,omitempty"` + // The scope of this task within the + // pipeline spec. Each entry represents + // either a Dag Task or a Container task. + // Note that Container tasks are + // always the last entry in a scope_path. + ScopePath []string `json:"scope_path"` + // Starting time of a task. // Format: date-time StartTime strfmt.DateTime `json:"start_time,omitempty"` - // Runtime state of a task. - State *V2beta1RuntimeState `json:"state,omitempty"` + // state + State *PipelineTaskDetailTaskState `json:"state,omitempty"` // A sequence of task statuses. This field keeps a record // of state transitions. - StateHistory []*V2beta1RuntimeStatus `json:"state_history"` + StateHistory []*PipelineTaskDetailTaskStatus `json:"state_history"` + + // status metadata + StatusMetadata *PipelineTaskDetailStatusMetadata `json:"status_metadata,omitempty"` // System-generated ID of a task.
TaskID string `json:"task_id,omitempty"` + + // type + Type *PipelineTaskDetailTaskType `json:"type,omitempty"` + + // type attributes + TypeAttributes *PipelineTaskDetailTypeAttributes `json:"type_attributes,omitempty"` } // Validate validates this v2beta1 pipeline task detail @@ -97,15 +112,15 @@ func (m *V2beta1PipelineTaskDetail) Validate(formats strfmt.Registry) error { res = append(res, err) } - if err := m.validateExecutorDetail(formats); err != nil { + if err := m.validateInputs(formats); err != nil { res = append(res, err) } - if err := m.validateInputs(formats); err != nil { + if err := m.validateOutputs(formats); err != nil { res = append(res, err) } - if err := m.validateOutputs(formats); err != nil { + if err := m.validatePods(formats); err != nil { res = append(res, err) } @@ -121,6 +136,18 @@ func (m *V2beta1PipelineTaskDetail) Validate(formats strfmt.Registry) error { res = append(res, err) } + if err := m.validateStatusMetadata(formats); err != nil { + res = append(res, err) + } + + if err := m.validateType(formats); err != nil { + res = append(res, err) + } + + if err := m.validateTypeAttributes(formats); err != nil { + res = append(res, err) + } + if len(res) > 0 { return errors.CompositeValidationError(res...) } @@ -196,17 +223,17 @@ func (m *V2beta1PipelineTaskDetail) validateError(formats strfmt.Registry) error return nil } -func (m *V2beta1PipelineTaskDetail) validateExecutorDetail(formats strfmt.Registry) error { - if swag.IsZero(m.ExecutorDetail) { // not required +func (m *V2beta1PipelineTaskDetail) validateInputs(formats strfmt.Registry) error { + if swag.IsZero(m.Inputs) { // not required return nil } - if m.ExecutorDetail != nil { - if err := m.ExecutorDetail.Validate(formats); err != nil { + if m.Inputs != nil { + if err := m.Inputs.Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName("executor_detail") + return ve.ValidateName("inputs") } else if ce, ok := err.(*errors.CompositeError); ok { - return ce.ValidateName("executor_detail") + return ce.ValidateName("inputs") } return err } @@ -215,48 +242,41 @@ func (m *V2beta1PipelineTaskDetail) validateExecutorDetail(formats strfmt.Regist return nil } -func (m *V2beta1PipelineTaskDetail) validateInputs(formats strfmt.Registry) error { - if swag.IsZero(m.Inputs) { // not required +func (m *V2beta1PipelineTaskDetail) validateOutputs(formats strfmt.Registry) error { + if swag.IsZero(m.Outputs) { // not required return nil } - for k := range m.Inputs { - - if err := validate.Required("inputs"+"."+k, "body", m.Inputs[k]); err != nil { - return err - } - if val, ok := m.Inputs[k]; ok { - if err := val.Validate(formats); err != nil { - if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName("inputs" + "." + k) - } else if ce, ok := err.(*errors.CompositeError); ok { - return ce.ValidateName("inputs" + "." 
+ k) - } - return err + if m.Outputs != nil { + if err := m.Outputs.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("outputs") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("outputs") } + return err } - } return nil } -func (m *V2beta1PipelineTaskDetail) validateOutputs(formats strfmt.Registry) error { - if swag.IsZero(m.Outputs) { // not required +func (m *V2beta1PipelineTaskDetail) validatePods(formats strfmt.Registry) error { + if swag.IsZero(m.Pods) { // not required return nil } - for k := range m.Outputs { - - if err := validate.Required("outputs"+"."+k, "body", m.Outputs[k]); err != nil { - return err + for i := 0; i < len(m.Pods); i++ { + if swag.IsZero(m.Pods[i]) { // not required + continue } - if val, ok := m.Outputs[k]; ok { - if err := val.Validate(formats); err != nil { + + if m.Pods[i] != nil { + if err := m.Pods[i].Validate(formats); err != nil { if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName("outputs" + "." + k) + return ve.ValidateName("pods" + "." + strconv.Itoa(i)) } else if ce, ok := err.(*errors.CompositeError); ok { - return ce.ValidateName("outputs" + "." + k) + return ce.ValidateName("pods" + "." + strconv.Itoa(i)) } return err } @@ -324,6 +344,63 @@ func (m *V2beta1PipelineTaskDetail) validateStateHistory(formats strfmt.Registry return nil } +func (m *V2beta1PipelineTaskDetail) validateStatusMetadata(formats strfmt.Registry) error { + if swag.IsZero(m.StatusMetadata) { // not required + return nil + } + + if m.StatusMetadata != nil { + if err := m.StatusMetadata.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("status_metadata") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("status_metadata") + } + return err + } + } + + return nil +} + +func (m *V2beta1PipelineTaskDetail) validateType(formats strfmt.Registry) error { + if swag.IsZero(m.Type) { // not required + return nil + } + + if m.Type != nil { + if err := m.Type.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("type") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("type") + } + return err + } + } + + return nil +} + +func (m *V2beta1PipelineTaskDetail) validateTypeAttributes(formats strfmt.Registry) error { + if swag.IsZero(m.TypeAttributes) { // not required + return nil + } + + if m.TypeAttributes != nil { + if err := m.TypeAttributes.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("type_attributes") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("type_attributes") + } + return err + } + } + + return nil +} + // ContextValidate validate this v2beta1 pipeline task detail based on the context it is used func (m *V2beta1PipelineTaskDetail) ContextValidate(ctx context.Context, formats strfmt.Registry) error { var res []error @@ -336,15 +413,15 @@ func (m *V2beta1PipelineTaskDetail) ContextValidate(ctx context.Context, formats res = append(res, err) } - if err := m.contextValidateExecutorDetail(ctx, formats); err != nil { + if err := m.contextValidateInputs(ctx, formats); err != nil { res = append(res, err) } - if err := m.contextValidateInputs(ctx, formats); err != nil { + if err := m.contextValidateOutputs(ctx, formats); err != nil { res = append(res, err) } - if err := m.contextValidateOutputs(ctx, formats); err != nil { + if err := 
m.contextValidatePods(ctx, formats); err != nil { res = append(res, err) } @@ -356,6 +433,18 @@ func (m *V2beta1PipelineTaskDetail) ContextValidate(ctx context.Context, formats res = append(res, err) } + if err := m.contextValidateStatusMetadata(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateType(ctx, formats); err != nil { + res = append(res, err) + } + + if err := m.contextValidateTypeAttributes(ctx, formats); err != nil { + res = append(res, err) + } + if len(res) > 0 { return errors.CompositeValidationError(res...) } @@ -408,19 +497,19 @@ func (m *V2beta1PipelineTaskDetail) contextValidateError(ctx context.Context, fo return nil } -func (m *V2beta1PipelineTaskDetail) contextValidateExecutorDetail(ctx context.Context, formats strfmt.Registry) error { +func (m *V2beta1PipelineTaskDetail) contextValidateInputs(ctx context.Context, formats strfmt.Registry) error { - if m.ExecutorDetail != nil { + if m.Inputs != nil { - if swag.IsZero(m.ExecutorDetail) { // not required + if swag.IsZero(m.Inputs) { // not required return nil } - if err := m.ExecutorDetail.ContextValidate(ctx, formats); err != nil { + if err := m.Inputs.ContextValidate(ctx, formats); err != nil { if ve, ok := err.(*errors.Validation); ok { - return ve.ValidateName("executor_detail") + return ve.ValidateName("inputs") } else if ce, ok := err.(*errors.CompositeError); ok { - return ce.ValidateName("executor_detail") + return ce.ValidateName("inputs") } return err } @@ -429,27 +518,43 @@ func (m *V2beta1PipelineTaskDetail) contextValidateExecutorDetail(ctx context.Co return nil } -func (m *V2beta1PipelineTaskDetail) contextValidateInputs(ctx context.Context, formats strfmt.Registry) error { +func (m *V2beta1PipelineTaskDetail) contextValidateOutputs(ctx context.Context, formats strfmt.Registry) error { - for k := range m.Inputs { + if m.Outputs != nil { - if val, ok := m.Inputs[k]; ok { - if err := val.ContextValidate(ctx, formats); err != nil { - return err - } + if swag.IsZero(m.Outputs) { // not required + return nil } + if err := m.Outputs.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("outputs") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("outputs") + } + return err + } } return nil } -func (m *V2beta1PipelineTaskDetail) contextValidateOutputs(ctx context.Context, formats strfmt.Registry) error { +func (m *V2beta1PipelineTaskDetail) contextValidatePods(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.Pods); i++ { - for k := range m.Outputs { + if m.Pods[i] != nil { + + if swag.IsZero(m.Pods[i]) { // not required + return nil + } - if val, ok := m.Outputs[k]; ok { - if err := val.ContextValidate(ctx, formats); err != nil { + if err := m.Pods[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("pods" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("pods" + "." 
+ strconv.Itoa(i)) + } return err } } @@ -505,6 +610,69 @@ func (m *V2beta1PipelineTaskDetail) contextValidateStateHistory(ctx context.Cont return nil } +func (m *V2beta1PipelineTaskDetail) contextValidateStatusMetadata(ctx context.Context, formats strfmt.Registry) error { + + if m.StatusMetadata != nil { + + if swag.IsZero(m.StatusMetadata) { // not required + return nil + } + + if err := m.StatusMetadata.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("status_metadata") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("status_metadata") + } + return err + } + } + + return nil +} + +func (m *V2beta1PipelineTaskDetail) contextValidateType(ctx context.Context, formats strfmt.Registry) error { + + if m.Type != nil { + + if swag.IsZero(m.Type) { // not required + return nil + } + + if err := m.Type.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("type") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("type") + } + return err + } + } + + return nil +} + +func (m *V2beta1PipelineTaskDetail) contextValidateTypeAttributes(ctx context.Context, formats strfmt.Registry) error { + + if m.TypeAttributes != nil { + + if swag.IsZero(m.TypeAttributes) { // not required + return nil + } + + if err := m.TypeAttributes.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("type_attributes") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("type_attributes") + } + return err + } + } + + return nil +} + // MarshalBinary interface implementation func (m *V2beta1PipelineTaskDetail) MarshalBinary() ([]byte, error) { if m == nil { diff --git a/backend/api/v2beta1/go_http_client/run_model/v2beta1_pipeline_task_executor_detail.go b/backend/api/v2beta1/go_http_client/run_model/v2beta1_pipeline_task_executor_detail.go deleted file mode 100644 index b2e2e18a78e..00000000000 --- a/backend/api/v2beta1/go_http_client/run_model/v2beta1_pipeline_task_executor_detail.go +++ /dev/null @@ -1,67 +0,0 @@ -// Code generated by go-swagger; DO NOT EDIT. - -package run_model - -// This file was generated by the swagger tool. -// Editing this file might prove futile when you re-run the swagger generate command - -import ( - "context" - - "github.com/go-openapi/strfmt" - "github.com/go-openapi/swag" -) - -// V2beta1PipelineTaskExecutorDetail Runtime information of a pipeline task executor. -// -// swagger:model v2beta1PipelineTaskExecutorDetail -type V2beta1PipelineTaskExecutorDetail struct { - - // The names of the previously failed job for the main container - // executions. The list includes the all attempts in chronological order. - FailedMainJobs []string `json:"failed_main_jobs"` - - // The names of the previously failed job for the - // pre-caching-check container executions. This job will be available if the - // Run.pipeline_spec specifies the `pre_caching_check` hook in - // the lifecycle events. - // The list includes the all attempts in chronological order. - FailedPreCachingCheckJobs []string `json:"failed_pre_caching_check_jobs"` - - // The name of the job for the main container execution. - MainJob string `json:"main_job,omitempty"` - - // The name of the job for the pre-caching-check container - // execution. 
This job will be available if the - // Run.pipeline_spec specifies the `pre_caching_check` hook in - // the lifecycle events. - PreCachingCheckJob string `json:"pre_caching_check_job,omitempty"` -} - -// Validate validates this v2beta1 pipeline task executor detail -func (m *V2beta1PipelineTaskExecutorDetail) Validate(formats strfmt.Registry) error { - return nil -} - -// ContextValidate validates this v2beta1 pipeline task executor detail based on context it is used -func (m *V2beta1PipelineTaskExecutorDetail) ContextValidate(ctx context.Context, formats strfmt.Registry) error { - return nil -} - -// MarshalBinary interface implementation -func (m *V2beta1PipelineTaskExecutorDetail) MarshalBinary() ([]byte, error) { - if m == nil { - return nil, nil - } - return swag.WriteJSON(m) -} - -// UnmarshalBinary interface implementation -func (m *V2beta1PipelineTaskExecutorDetail) UnmarshalBinary(b []byte) error { - var res V2beta1PipelineTaskExecutorDetail - if err := swag.ReadJSON(b, &res); err != nil { - return err - } - *m = res - return nil -} diff --git a/backend/api/v2beta1/go_http_client/run_model/v2beta1_run.go b/backend/api/v2beta1/go_http_client/run_model/v2beta1_run.go index eb83d3e8492..03e68f34600 100644 --- a/backend/api/v2beta1/go_http_client/run_model/v2beta1_run.go +++ b/backend/api/v2beta1/go_http_client/run_model/v2beta1_run.go @@ -44,6 +44,10 @@ type V2beta1Run struct { // Format: date-time FinishedAt strfmt.DateTime `json:"finished_at,omitempty"` + // Output only. Reference to the pipeline used for this run. + // Read Only: true + PipelineReference *V2beta1PipelineVersionReference `json:"pipeline_reference,omitempty"` + // Pipeline spec. PipelineSpec interface{} `json:"pipeline_spec,omitempty"` @@ -57,6 +61,7 @@ type V2beta1Run struct { RecurringRunID string `json:"recurring_run_id,omitempty"` // Output. Runtime details of a run. + // Either remove or deprecate this RunDetails *V2beta1RunDetails `json:"run_details,omitempty"` // Output. Unique run ID. Generated by API server. @@ -83,6 +88,12 @@ type V2beta1Run struct { // Output. Specifies whether this run is in archived or available mode. StorageState *V2beta1RunStorageState `json:"storage_state,omitempty"` + + // task count + TaskCount int32 `json:"task_count,omitempty"` + + // tasks + Tasks []*V2beta1PipelineTaskDetail `json:"tasks"` } // Validate validates this v2beta1 run @@ -101,6 +112,10 @@ func (m *V2beta1Run) Validate(formats strfmt.Registry) error { res = append(res, err) } + if err := m.validatePipelineReference(formats); err != nil { + res = append(res, err) + } + if err := m.validatePipelineVersionReference(formats); err != nil { res = append(res, err) } @@ -129,6 +144,10 @@ func (m *V2beta1Run) Validate(formats strfmt.Registry) error { res = append(res, err) } + if err := m.validateTasks(formats); err != nil { + res = append(res, err) + } + if len(res) > 0 { return errors.CompositeValidationError(res...) 
} @@ -178,6 +197,25 @@ func (m *V2beta1Run) validateFinishedAt(formats strfmt.Registry) error { return nil } +func (m *V2beta1Run) validatePipelineReference(formats strfmt.Registry) error { + if swag.IsZero(m.PipelineReference) { // not required + return nil + } + + if m.PipelineReference != nil { + if err := m.PipelineReference.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("pipeline_reference") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("pipeline_reference") + } + return err + } + } + + return nil +} + func (m *V2beta1Run) validatePipelineVersionReference(formats strfmt.Registry) error { if swag.IsZero(m.PipelineVersionReference) { // not required return nil @@ -311,6 +349,32 @@ func (m *V2beta1Run) validateStorageState(formats strfmt.Registry) error { return nil } +func (m *V2beta1Run) validateTasks(formats strfmt.Registry) error { + if swag.IsZero(m.Tasks) { // not required + return nil + } + + for i := 0; i < len(m.Tasks); i++ { + if swag.IsZero(m.Tasks[i]) { // not required + continue + } + + if m.Tasks[i] != nil { + if err := m.Tasks[i].Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("tasks" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("tasks" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + // ContextValidate validate this v2beta1 run based on the context it is used func (m *V2beta1Run) ContextValidate(ctx context.Context, formats strfmt.Registry) error { var res []error @@ -319,6 +383,10 @@ func (m *V2beta1Run) ContextValidate(ctx context.Context, formats strfmt.Registr res = append(res, err) } + if err := m.contextValidatePipelineReference(ctx, formats); err != nil { + res = append(res, err) + } + if err := m.contextValidatePipelineVersionReference(ctx, formats); err != nil { res = append(res, err) } @@ -343,6 +411,10 @@ func (m *V2beta1Run) ContextValidate(ctx context.Context, formats strfmt.Registr res = append(res, err) } + if err := m.contextValidateTasks(ctx, formats); err != nil { + res = append(res, err) + } + if len(res) > 0 { return errors.CompositeValidationError(res...) 
} @@ -370,6 +442,27 @@ func (m *V2beta1Run) contextValidateError(ctx context.Context, formats strfmt.Re return nil } +func (m *V2beta1Run) contextValidatePipelineReference(ctx context.Context, formats strfmt.Registry) error { + + if m.PipelineReference != nil { + + if swag.IsZero(m.PipelineReference) { // not required + return nil + } + + if err := m.PipelineReference.ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("pipeline_reference") + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("pipeline_reference") + } + return err + } + } + + return nil +} + func (m *V2beta1Run) contextValidatePipelineVersionReference(ctx context.Context, formats strfmt.Registry) error { if m.PipelineVersionReference != nil { @@ -500,6 +593,31 @@ func (m *V2beta1Run) contextValidateStorageState(ctx context.Context, formats st return nil } +func (m *V2beta1Run) contextValidateTasks(ctx context.Context, formats strfmt.Registry) error { + + for i := 0; i < len(m.Tasks); i++ { + + if m.Tasks[i] != nil { + + if swag.IsZero(m.Tasks[i]) { // not required + return nil + } + + if err := m.Tasks[i].ContextValidate(ctx, formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("tasks" + "." + strconv.Itoa(i)) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("tasks" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + // MarshalBinary interface implementation func (m *V2beta1Run) MarshalBinary() ([]byte, error) { if m == nil { diff --git a/backend/api/v2beta1/go_http_client/run_model/v2beta1_update_tasks_bulk_request.go b/backend/api/v2beta1/go_http_client/run_model/v2beta1_update_tasks_bulk_request.go new file mode 100644 index 00000000000..fefe9a6c1e3 --- /dev/null +++ b/backend/api/v2beta1/go_http_client/run_model/v2beta1_update_tasks_bulk_request.go @@ -0,0 +1,112 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package run_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + + "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" + "github.com/go-openapi/validate" +) + +// V2beta1UpdateTasksBulkRequest v2beta1 update tasks bulk request +// +// swagger:model v2beta1UpdateTasksBulkRequest +type V2beta1UpdateTasksBulkRequest struct { + + // Required. Map of task ID to task detail for bulk update. + // Key: task_id, Value: PipelineTaskDetail to update + Tasks map[string]V2beta1PipelineTaskDetail `json:"tasks,omitempty"` +} + +// Validate validates this v2beta1 update tasks bulk request +func (m *V2beta1UpdateTasksBulkRequest) Validate(formats strfmt.Registry) error { + var res []error + + if err := m.validateTasks(formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *V2beta1UpdateTasksBulkRequest) validateTasks(formats strfmt.Registry) error { + if swag.IsZero(m.Tasks) { // not required + return nil + } + + for k := range m.Tasks { + + if err := validate.Required("tasks"+"."+k, "body", m.Tasks[k]); err != nil { + return err + } + if val, ok := m.Tasks[k]; ok { + if err := val.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("tasks" + "." 
+ k) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("tasks" + "." + k) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this v2beta1 update tasks bulk request based on the context it is used +func (m *V2beta1UpdateTasksBulkRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateTasks(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *V2beta1UpdateTasksBulkRequest) contextValidateTasks(ctx context.Context, formats strfmt.Registry) error { + + for k := range m.Tasks { + + if val, ok := m.Tasks[k]; ok { + if err := val.ContextValidate(ctx, formats); err != nil { + return err + } + } + + } + + return nil +} + +// MarshalBinary interface implementation +func (m *V2beta1UpdateTasksBulkRequest) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *V2beta1UpdateTasksBulkRequest) UnmarshalBinary(b []byte) error { + var res V2beta1UpdateTasksBulkRequest + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/v2beta1/go_http_client/run_model/v2beta1_update_tasks_bulk_response.go b/backend/api/v2beta1/go_http_client/run_model/v2beta1_update_tasks_bulk_response.go new file mode 100644 index 00000000000..72a253f0544 --- /dev/null +++ b/backend/api/v2beta1/go_http_client/run_model/v2beta1_update_tasks_bulk_response.go @@ -0,0 +1,112 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package run_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + + "github.com/go-openapi/errors" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" + "github.com/go-openapi/validate" +) + +// V2beta1UpdateTasksBulkResponse v2beta1 update tasks bulk response +// +// swagger:model v2beta1UpdateTasksBulkResponse +type V2beta1UpdateTasksBulkResponse struct { + + // Map of task ID to updated task detail. + // Key: task_id, Value: Updated PipelineTaskDetail + Tasks map[string]V2beta1PipelineTaskDetail `json:"tasks,omitempty"` +} + +// Validate validates this v2beta1 update tasks bulk response +func (m *V2beta1UpdateTasksBulkResponse) Validate(formats strfmt.Registry) error { + var res []error + + if err := m.validateTasks(formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *V2beta1UpdateTasksBulkResponse) validateTasks(formats strfmt.Registry) error { + if swag.IsZero(m.Tasks) { // not required + return nil + } + + for k := range m.Tasks { + + if err := validate.Required("tasks"+"."+k, "body", m.Tasks[k]); err != nil { + return err + } + if val, ok := m.Tasks[k]; ok { + if err := val.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("tasks" + "." + k) + } else if ce, ok := err.(*errors.CompositeError); ok { + return ce.ValidateName("tasks" + "." 
+ k) + } + return err + } + } + + } + + return nil +} + +// ContextValidate validate this v2beta1 update tasks bulk response based on the context it is used +func (m *V2beta1UpdateTasksBulkResponse) ContextValidate(ctx context.Context, formats strfmt.Registry) error { + var res []error + + if err := m.contextValidateTasks(ctx, formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *V2beta1UpdateTasksBulkResponse) contextValidateTasks(ctx context.Context, formats strfmt.Registry) error { + + for k := range m.Tasks { + + if val, ok := m.Tasks[k]; ok { + if err := val.ContextValidate(ctx, formats); err != nil { + return err + } + } + + } + + return nil +} + +// MarshalBinary interface implementation +func (m *V2beta1UpdateTasksBulkResponse) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *V2beta1UpdateTasksBulkResponse) UnmarshalBinary(b []byte) error { + var res V2beta1UpdateTasksBulkResponse + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/v2beta1/python_http_client/README.md b/backend/api/v2beta1/python_http_client/README.md index 4d1c26321ed..3e759fd8038 100644 --- a/backend/api/v2beta1/python_http_client/README.md +++ b/backend/api/v2beta1/python_http_client/README.md @@ -78,16 +78,15 @@ configuration = kfp_server_api.Configuration( # Enter a context with an instance of the API client with kfp_server_api.ApiClient(configuration) as api_client: # Create an instance of the API class - api_instance = kfp_server_api.AuthServiceApi(api_client) - namespace = 'namespace_example' # str | Namespace the resource belongs to. (optional) -resources = 'UNASSIGNED_RESOURCES' # str | Resource type asking for authorization. (optional) (default to 'UNASSIGNED_RESOURCES') -verb = 'UNASSIGNED_VERB' # str | Verb on the resource asking for authorization. (optional) (default to 'UNASSIGNED_VERB') + api_instance = kfp_server_api.ArtifactServiceApi(api_client) + body = kfp_server_api.V2beta1CreateArtifactTasksBulkRequest() # V2beta1CreateArtifactTasksBulkRequest | try: - api_response = api_instance.auth_service_authorize(namespace=namespace, resources=resources, verb=verb) + # Creates multiple artifact-task relationships in bulk. + api_response = api_instance.batch_create_artifact_tasks(body) pprint(api_response) except ApiException as e: - print("Exception when calling AuthServiceApi->auth_service_authorize: %s\n" % e) + print("Exception when calling ArtifactServiceApi->batch_create_artifact_tasks: %s\n" % e) ``` @@ -97,6 +96,13 @@ All URIs are relative to *http://localhost* Class | Method | HTTP request | Description ------------ | ------------- | ------------- | ------------- +*ArtifactServiceApi* | [**batch_create_artifact_tasks**](docs/ArtifactServiceApi.md#batch_create_artifact_tasks) | **POST** /apis/v2beta1/artifact_tasks:batchCreate | Creates multiple artifact-task relationships in bulk. +*ArtifactServiceApi* | [**batch_create_artifacts**](docs/ArtifactServiceApi.md#batch_create_artifacts) | **POST** /apis/v2beta1/artifacts:batchCreate | Creates multiple artifacts in bulk. +*ArtifactServiceApi* | [**create_artifact**](docs/ArtifactServiceApi.md#create_artifact) | **POST** /apis/v2beta1/artifacts | Creates a new artifact. 
+*ArtifactServiceApi* | [**create_artifact_task**](docs/ArtifactServiceApi.md#create_artifact_task) | **POST** /apis/v2beta1/artifact_tasks | Creates an artifact-task relationship. +*ArtifactServiceApi* | [**get_artifact**](docs/ArtifactServiceApi.md#get_artifact) | **GET** /apis/v2beta1/artifacts/{artifact_id} | Finds a specific Artifact by ID. +*ArtifactServiceApi* | [**list_artifact_tasks**](docs/ArtifactServiceApi.md#list_artifact_tasks) | **GET** /apis/v2beta1/artifact_tasks | Lists artifact-task relationships. +*ArtifactServiceApi* | [**list_artifacts**](docs/ArtifactServiceApi.md#list_artifacts) | **GET** /apis/v2beta1/artifacts | Finds all artifacts within the specified namespace. *AuthServiceApi* | [**auth_service_authorize**](docs/AuthServiceApi.md#auth_service_authorize) | **GET** /apis/v2beta1/auth | *ExperimentServiceApi* | [**experiment_service_archive_experiment**](docs/ExperimentServiceApi.md#experiment_service_archive_experiment) | **POST** /apis/v2beta1/experiments/{experiment_id}:archive | Archives an experiment and the experiment's runs and recurring runs. *ExperimentServiceApi* | [**experiment_service_create_experiment**](docs/ExperimentServiceApi.md#experiment_service_create_experiment) | **POST** /apis/v2beta1/experiments | Creates a new experiment. @@ -125,6 +131,10 @@ Class | Method | HTTP request | Description *RecurringRunServiceApi* | [**recurring_run_service_list_recurring_runs**](docs/RecurringRunServiceApi.md#recurring_run_service_list_recurring_runs) | **GET** /apis/v2beta1/recurringruns | Finds all recurring runs given experiment and namespace. If experiment ID is not specified, find all recurring runs across all experiments. *ReportServiceApi* | [**report_service_report_scheduled_workflow**](docs/ReportServiceApi.md#report_service_report_scheduled_workflow) | **POST** /apis/v2beta1/scheduledworkflows | *ReportServiceApi* | [**report_service_report_workflow**](docs/ReportServiceApi.md#report_service_report_workflow) | **POST** /apis/v2beta1/workflows | +*RunServiceApi* | [**batch_update_tasks**](docs/RunServiceApi.md#batch_update_tasks) | **POST** /apis/v2beta1/tasks:batchUpdate | Updates multiple tasks in bulk. +*RunServiceApi* | [**create_task**](docs/RunServiceApi.md#create_task) | **POST** /apis/v2beta1/tasks | Creates a new task. +*RunServiceApi* | [**get_task**](docs/RunServiceApi.md#get_task) | **GET** /apis/v2beta1/tasks/{task_id} | Gets a specific task by ID. +*RunServiceApi* | [**list_tasks**](docs/RunServiceApi.md#list_tasks) | **GET** /apis/v2beta1/tasks | Lists tasks with optional filtering. *RunServiceApi* | [**run_service_archive_run**](docs/RunServiceApi.md#run_service_archive_run) | **POST** /apis/v2beta1/runs/{run_id}:archive | Archives a run in an experiment given by run ID and experiment ID. *RunServiceApi* | [**run_service_create_run**](docs/RunServiceApi.md#run_service_create_run) | **POST** /apis/v2beta1/runs | Creates a new run in an experiment specified by experiment ID. If experiment ID is not specified, the run is created in the default experiment. *RunServiceApi* | [**run_service_delete_run**](docs/RunServiceApi.md#run_service_delete_run) | **DELETE** /apis/v2beta1/runs/{run_id} | Deletes a run in an experiment given by run ID and experiment ID. @@ -134,37 +144,62 @@ Class | Method | HTTP request | Description *RunServiceApi* | [**run_service_retry_run**](docs/RunServiceApi.md#run_service_retry_run) | **POST** /apis/v2beta1/runs/{run_id}:retry | Re-initiates a failed or terminated run. 
*RunServiceApi* | [**run_service_terminate_run**](docs/RunServiceApi.md#run_service_terminate_run) | **POST** /apis/v2beta1/runs/{run_id}:terminate | Terminates an active run. *RunServiceApi* | [**run_service_unarchive_run**](docs/RunServiceApi.md#run_service_unarchive_run) | **POST** /apis/v2beta1/runs/{run_id}:unarchive | Restores an archived run in an experiment given by run ID and experiment ID. +*RunServiceApi* | [**update_task**](docs/RunServiceApi.md#update_task) | **PATCH** /apis/v2beta1/tasks/{task_id} | Updates an existing task. *VisualizationServiceApi* | [**visualization_service_create_visualization_v1**](docs/VisualizationServiceApi.md#visualization_service_create_visualization_v1) | **POST** /apis/v2beta1/visualizations/{namespace} | ## Documentation For Models + - [ArtifactArtifactType](docs/ArtifactArtifactType.md) - [AuthorizeRequestResources](docs/AuthorizeRequestResources.md) - [AuthorizeRequestVerb](docs/AuthorizeRequestVerb.md) - [GooglerpcStatus](docs/GooglerpcStatus.md) + - [InputOutputsIOArtifact](docs/InputOutputsIOArtifact.md) + - [InputOutputsIOParameter](docs/InputOutputsIOParameter.md) - [PipelineTaskDetailChildTask](docs/PipelineTaskDetailChildTask.md) + - [PipelineTaskDetailInputOutputs](docs/PipelineTaskDetailInputOutputs.md) + - [PipelineTaskDetailStatusMetadata](docs/PipelineTaskDetailStatusMetadata.md) + - [PipelineTaskDetailTaskPod](docs/PipelineTaskDetailTaskPod.md) + - [PipelineTaskDetailTaskPodType](docs/PipelineTaskDetailTaskPodType.md) + - [PipelineTaskDetailTaskState](docs/PipelineTaskDetailTaskState.md) + - [PipelineTaskDetailTaskStatus](docs/PipelineTaskDetailTaskStatus.md) + - [PipelineTaskDetailTaskType](docs/PipelineTaskDetailTaskType.md) + - [PipelineTaskDetailTypeAttributes](docs/PipelineTaskDetailTypeAttributes.md) - [PredicateIntValues](docs/PredicateIntValues.md) - [PredicateLongValues](docs/PredicateLongValues.md) - [PredicateStringValues](docs/PredicateStringValues.md) - [ProtobufAny](docs/ProtobufAny.md) - [ProtobufNullValue](docs/ProtobufNullValue.md) - [RecurringRunMode](docs/RecurringRunMode.md) - - [V2beta1ArtifactList](docs/V2beta1ArtifactList.md) + - [V2beta1Artifact](docs/V2beta1Artifact.md) + - [V2beta1ArtifactTask](docs/V2beta1ArtifactTask.md) + - [V2beta1CreateArtifactRequest](docs/V2beta1CreateArtifactRequest.md) + - [V2beta1CreateArtifactTaskRequest](docs/V2beta1CreateArtifactTaskRequest.md) + - [V2beta1CreateArtifactTasksBulkRequest](docs/V2beta1CreateArtifactTasksBulkRequest.md) + - [V2beta1CreateArtifactTasksBulkResponse](docs/V2beta1CreateArtifactTasksBulkResponse.md) + - [V2beta1CreateArtifactsBulkRequest](docs/V2beta1CreateArtifactsBulkRequest.md) + - [V2beta1CreateArtifactsBulkResponse](docs/V2beta1CreateArtifactsBulkResponse.md) - [V2beta1CreatePipelineAndVersionRequest](docs/V2beta1CreatePipelineAndVersionRequest.md) - [V2beta1CronSchedule](docs/V2beta1CronSchedule.md) - [V2beta1Experiment](docs/V2beta1Experiment.md) - [V2beta1ExperimentStorageState](docs/V2beta1ExperimentStorageState.md) - [V2beta1Filter](docs/V2beta1Filter.md) - [V2beta1GetHealthzResponse](docs/V2beta1GetHealthzResponse.md) + - [V2beta1GetRunRequestViewMode](docs/V2beta1GetRunRequestViewMode.md) + - [V2beta1IOProducer](docs/V2beta1IOProducer.md) + - [V2beta1IOType](docs/V2beta1IOType.md) + - [V2beta1ListArtifactResponse](docs/V2beta1ListArtifactResponse.md) + - [V2beta1ListArtifactTasksResponse](docs/V2beta1ListArtifactTasksResponse.md) - [V2beta1ListExperimentsResponse](docs/V2beta1ListExperimentsResponse.md) - 
[V2beta1ListPipelineVersionsResponse](docs/V2beta1ListPipelineVersionsResponse.md) - [V2beta1ListPipelinesResponse](docs/V2beta1ListPipelinesResponse.md) - [V2beta1ListRecurringRunsResponse](docs/V2beta1ListRecurringRunsResponse.md) + - [V2beta1ListRunsRequestViewMode](docs/V2beta1ListRunsRequestViewMode.md) - [V2beta1ListRunsResponse](docs/V2beta1ListRunsResponse.md) + - [V2beta1ListTasksResponse](docs/V2beta1ListTasksResponse.md) - [V2beta1PeriodicSchedule](docs/V2beta1PeriodicSchedule.md) - [V2beta1Pipeline](docs/V2beta1Pipeline.md) - [V2beta1PipelineTaskDetail](docs/V2beta1PipelineTaskDetail.md) - - [V2beta1PipelineTaskExecutorDetail](docs/V2beta1PipelineTaskExecutorDetail.md) - [V2beta1PipelineVersion](docs/V2beta1PipelineVersion.md) - [V2beta1PipelineVersionReference](docs/V2beta1PipelineVersionReference.md) - [V2beta1Predicate](docs/V2beta1Predicate.md) @@ -179,6 +214,8 @@ Class | Method | HTTP request | Description - [V2beta1RuntimeState](docs/V2beta1RuntimeState.md) - [V2beta1RuntimeStatus](docs/V2beta1RuntimeStatus.md) - [V2beta1Trigger](docs/V2beta1Trigger.md) + - [V2beta1UpdateTasksBulkRequest](docs/V2beta1UpdateTasksBulkRequest.md) + - [V2beta1UpdateTasksBulkResponse](docs/V2beta1UpdateTasksBulkResponse.md) - [V2beta1Url](docs/V2beta1Url.md) - [V2beta1Visualization](docs/V2beta1Visualization.md) - [V2beta1VisualizationType](docs/V2beta1VisualizationType.md) diff --git a/backend/api/v2beta1/python_http_client/docs/V2beta1ArtifactList.md b/backend/api/v2beta1/python_http_client/docs/ArtifactArtifactType.md similarity index 67% rename from backend/api/v2beta1/python_http_client/docs/V2beta1ArtifactList.md rename to backend/api/v2beta1/python_http_client/docs/ArtifactArtifactType.md index 5a4feade0c6..8caeb82a27b 100644 --- a/backend/api/v2beta1/python_http_client/docs/V2beta1ArtifactList.md +++ b/backend/api/v2beta1/python_http_client/docs/ArtifactArtifactType.md @@ -1,10 +1,9 @@ -# V2beta1ArtifactList +# ArtifactArtifactType -A list of artifact metadata. + - TYPE_UNSPECIFIED: default; treated as \"not set\" reject if unset. ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**artifact_ids** | **list[str]** | A list of artifact metadata ids. | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/backend/api/v2beta1/python_http_client/docs/ArtifactServiceApi.md b/backend/api/v2beta1/python_http_client/docs/ArtifactServiceApi.md new file mode 100644 index 00000000000..6e62f5f03e0 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/docs/ArtifactServiceApi.md @@ -0,0 +1,569 @@ +# kfp_server_api.ArtifactServiceApi + +All URIs are relative to *http://localhost* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**batch_create_artifact_tasks**](ArtifactServiceApi.md#batch_create_artifact_tasks) | **POST** /apis/v2beta1/artifact_tasks:batchCreate | Creates multiple artifact-task relationships in bulk. +[**batch_create_artifacts**](ArtifactServiceApi.md#batch_create_artifacts) | **POST** /apis/v2beta1/artifacts:batchCreate | Creates multiple artifacts in bulk. +[**create_artifact**](ArtifactServiceApi.md#create_artifact) | **POST** /apis/v2beta1/artifacts | Creates a new artifact. +[**create_artifact_task**](ArtifactServiceApi.md#create_artifact_task) | **POST** /apis/v2beta1/artifact_tasks | Creates an artifact-task relationship. 
+[**get_artifact**](ArtifactServiceApi.md#get_artifact) | **GET** /apis/v2beta1/artifacts/{artifact_id} | Finds a specific Artifact by ID. +[**list_artifact_tasks**](ArtifactServiceApi.md#list_artifact_tasks) | **GET** /apis/v2beta1/artifact_tasks | Lists artifact-task relationships. +[**list_artifacts**](ArtifactServiceApi.md#list_artifacts) | **GET** /apis/v2beta1/artifacts | Finds all artifacts within the specified namespace. + + +# **batch_create_artifact_tasks** +> V2beta1CreateArtifactTasksBulkResponse batch_create_artifact_tasks(body) + +Creates multiple artifact-task relationships in bulk. + +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.ArtifactServiceApi(api_client) + body = kfp_server_api.V2beta1CreateArtifactTasksBulkRequest() # V2beta1CreateArtifactTasksBulkRequest | + + try: + # Creates multiple artifact-task relationships in bulk. + api_response = api_instance.batch_create_artifact_tasks(body) + pprint(api_response) + except ApiException as e: + print("Exception when calling ArtifactServiceApi->batch_create_artifact_tasks: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **body** | [**V2beta1CreateArtifactTasksBulkRequest**](V2beta1CreateArtifactTasksBulkRequest.md)| | + +### Return type + +[**V2beta1CreateArtifactTasksBulkResponse**](V2beta1CreateArtifactTasksBulkResponse.md) + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. | - | +**0** | An unexpected error response. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **batch_create_artifacts** +> V2beta1CreateArtifactsBulkResponse batch_create_artifacts(body) + +Creates multiple artifacts in bulk. 
+ +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.ArtifactServiceApi(api_client) + body = kfp_server_api.V2beta1CreateArtifactsBulkRequest() # V2beta1CreateArtifactsBulkRequest | + + try: + # Creates multiple artifacts in bulk. + api_response = api_instance.batch_create_artifacts(body) + pprint(api_response) + except ApiException as e: + print("Exception when calling ArtifactServiceApi->batch_create_artifacts: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **body** | [**V2beta1CreateArtifactsBulkRequest**](V2beta1CreateArtifactsBulkRequest.md)| | + +### Return type + +[**V2beta1CreateArtifactsBulkResponse**](V2beta1CreateArtifactsBulkResponse.md) + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. | - | +**0** | An unexpected error response. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **create_artifact** +> V2beta1Artifact create_artifact(body) + +Creates a new artifact. + +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.ArtifactServiceApi(api_client) + body = kfp_server_api.V2beta1CreateArtifactRequest() # V2beta1CreateArtifactRequest | + + try: + # Creates a new artifact. + api_response = api_instance.create_artifact(body) + pprint(api_response) + except ApiException as e: + print("Exception when calling ArtifactServiceApi->create_artifact: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **body** | [**V2beta1CreateArtifactRequest**](V2beta1CreateArtifactRequest.md)| | + +### Return type + +[**V2beta1Artifact**](V2beta1Artifact.md) + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. | - | +**0** | An unexpected error response. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **create_artifact_task** +> V2beta1ArtifactTask create_artifact_task(body) + +Creates an artifact-task relationship. + +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.ArtifactServiceApi(api_client) + body = kfp_server_api.V2beta1CreateArtifactTaskRequest() # V2beta1CreateArtifactTaskRequest | + + try: + # Creates an artifact-task relationship. 
+ api_response = api_instance.create_artifact_task(body) + pprint(api_response) + except ApiException as e: + print("Exception when calling ArtifactServiceApi->create_artifact_task: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **body** | [**V2beta1CreateArtifactTaskRequest**](V2beta1CreateArtifactTaskRequest.md)| | + +### Return type + +[**V2beta1ArtifactTask**](V2beta1ArtifactTask.md) + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. | - | +**0** | An unexpected error response. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_artifact** +> V2beta1Artifact get_artifact(artifact_id) + +Finds a specific Artifact by ID. + +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.ArtifactServiceApi(api_client) + artifact_id = 'artifact_id_example' # str | Required. The ID of the artifact to be retrieved. + + try: + # Finds a specific Artifact by ID. + api_response = api_instance.get_artifact(artifact_id) + pprint(api_response) + except ApiException as e: + print("Exception when calling ArtifactServiceApi->get_artifact: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **artifact_id** | **str**| Required. The ID of the artifact to be retrieved. | + +### Return type + +[**V2beta1Artifact**](V2beta1Artifact.md) + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. | - | +**0** | An unexpected error response. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **list_artifact_tasks** +> V2beta1ListArtifactTasksResponse list_artifact_tasks(task_ids=task_ids, run_ids=run_ids, artifact_ids=artifact_ids, type=type, page_token=page_token, page_size=page_size, sort_by=sort_by, filter=filter) + +Lists artifact-task relationships. + +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.ArtifactServiceApi(api_client) + task_ids = ['task_ids_example'] # list[str] | Optional, filter artifact task by a set of task_ids (optional) +run_ids = ['run_ids_example'] # list[str] | Optional, filter artifact task by a set of run_ids (optional) +artifact_ids = ['artifact_ids_example'] # list[str] | Optional, filter artifact task by a set of artifact_ids (optional) +type = 'UNSPECIFIED' # str | Optional. Only list artifact tasks that have artifacts of this type. - UNSPECIFIED: For validation - COMPONENT_DEFAULT_INPUT: This is used for inputs that are provided via default parameters in the component input definitions - TASK_OUTPUT_INPUT: This is used for inputs that are provided via upstream tasks. In the sdk this appears as: TaskInputsSpec.kind.task_output_parameter & TaskInputsSpec.kind.task_output_artifact - COMPONENT_INPUT: Used for inputs that are passed from parent tasks. - RUNTIME_VALUE_INPUT: Hardcoded values passed as arguments to the task. - COLLECTED_INPUTS: Used for dsl.Collected. Usage of this type indicates that all Artifacts within the IOArtifact.artifacts are inputs collected from sub tasks with ITERATOR_OUTPUT outputs. - ITERATOR_INPUT: In a for loop task, introduced via ParallelFor, this type is used to indicate whether this resolved input belongs to a parameterIterator or artifactIterator. In such a case the \"artifacts\" field for IOArtifact.artifacts is the list of resolved items for this parallelFor. - ITERATOR_INPUT_RAW: Hardcoded iterator parameters. Raw Iterator inputs have no producer. - ITERATOR_OUTPUT: When an output is produced by a Runtime Iteration Task. This value is used to differentiate between standard inputs. - OUTPUT: All other output types fall under this type. - ONE_OF_OUTPUT: An output of a Conditions branch.
(optional) (default to 'UNSPECIFIED') +page_token = 'page_token_example' # str | (optional) +page_size = 56 # int | (optional) +sort_by = 'sort_by_example' # str | (optional) +filter = 'filter_example' # str | (optional) + + try: + # Lists artifact-task relationships. + api_response = api_instance.list_artifact_tasks(task_ids=task_ids, run_ids=run_ids, artifact_ids=artifact_ids, type=type, page_token=page_token, page_size=page_size, sort_by=sort_by, filter=filter) + pprint(api_response) + except ApiException as e: + print("Exception when calling ArtifactServiceApi->list_artifact_tasks: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **task_ids** | [**list[str]**](str.md)| Optional, filter artifact task by a set of task_ids | [optional] + **run_ids** | [**list[str]**](str.md)| Optional, filter artifact task by a set of run_ids | [optional] + **artifact_ids** | [**list[str]**](str.md)| Optional, filter artifact task by a set of artifact_ids | [optional] + **type** | **str**| Optional. Only list artifact tasks that have artifacts of this type. - UNSPECIFIED: For validation - COMPONENT_DEFAULT_INPUT: This is used for inputs that are provided via default parameters in the component input definitions - TASK_OUTPUT_INPUT: This is used for inputs that are provided via upstream tasks. In the sdk this appears as: TaskInputsSpec.kind.task_output_parameter & TaskInputsSpec.kind.task_output_artifact - COMPONENT_INPUT: Used for inputs that are passed from parent tasks. - RUNTIME_VALUE_INPUT: Hardcoded values passed as arguments to the task. - COLLECTED_INPUTS: Used for dsl.Collected. Usage of this type indicates that all Artifacts within the IOArtifact.artifacts are inputs collected from sub tasks with ITERATOR_OUTPUT outputs. - ITERATOR_INPUT: In a for loop task, introduced via ParallelFor, this type is used to indicate whether this resolved input belongs to a parameterIterator or artifactIterator. In such a case the \"artifacts\" field for IOArtifact.artifacts is the list of resolved items for this parallelFor. - ITERATOR_INPUT_RAW: Hardcoded iterator parameters. Raw Iterator inputs have no producer. - ITERATOR_OUTPUT: When an output is produced by a Runtime Iteration Task. This value is used to differentiate between standard inputs. - OUTPUT: All other output types fall under this type. - ONE_OF_OUTPUT: An output of a Conditions branch. | [optional] [default to 'UNSPECIFIED'] + **page_token** | **str**| | [optional] + **page_size** | **int**| | [optional] + **sort_by** | **str**| | [optional] + **filter** | **str**| | [optional] + +### Return type + +[**V2beta1ListArtifactTasksResponse**](V2beta1ListArtifactTasksResponse.md) + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. | - | +**0** | An unexpected error response. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **list_artifacts** +> V2beta1ListArtifactResponse list_artifacts(namespace=namespace, page_token=page_token, page_size=page_size, sort_by=sort_by, filter=filter) + +Finds all artifacts within the specified namespace.
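+
+Beyond the generated example below, a common pattern is to page through results with `next_page_token`. The following is a minimal, hypothetical pagination sketch: it assumes a `configuration` object set up as in the example and a namespace named `my-namespace`; `list_artifacts` and the `artifacts`/`next_page_token` response fields are the call and fields documented on this page.
+
+```python
+import kfp_server_api
+
+with kfp_server_api.ApiClient(configuration) as api_client:
+    api = kfp_server_api.ArtifactServiceApi(api_client)
+    artifacts, page_token = [], ''
+    while True:
+        # Fetch one page; an empty next_page_token means there are no more results.
+        resp = api.list_artifacts(namespace='my-namespace',  # hypothetical namespace
+                                  page_size=50,
+                                  page_token=page_token)
+        artifacts.extend(resp.artifacts or [])
+        page_token = resp.next_page_token
+        if not page_token:
+            break
+```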
+ +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.ArtifactServiceApi(api_client) + namespace = 'namespace_example' # str | Optional input. Namespace for the artifacts. (optional) +page_token = 'page_token_example' # str | A page token to request the results page. (optional) +page_size = 56 # int | The number of artifacts to be listed per page. If there are more artifacts than this number, the response message will contain a valid value in the nextPageToken field. (optional) +sort_by = 'sort_by_example' # str | Sorting order in form of \"field_name\", \"field_name asc\" or \"field_name desc\". Ascending by default. (optional) +filter = 'filter_example' # str | A url-encoded, JSON-serialized filter protocol buffer (see [filter.proto](https://github.com/kubeflow/artifacts/blob/master/backend/api/filter.proto)). (optional) + + try: + # Finds all artifacts within the specified namespace. + api_response = api_instance.list_artifacts(namespace=namespace, page_token=page_token, page_size=page_size, sort_by=sort_by, filter=filter) + pprint(api_response) + except ApiException as e: + print("Exception when calling ArtifactServiceApi->list_artifacts: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **namespace** | **str**| Optional input. Namespace for the artifacts. | [optional] + **page_token** | **str**| A page token to request the results page. | [optional] + **page_size** | **int**| The number of artifacts to be listed per page. If there are more artifacts than this number, the response message will contain a valid value in the nextPageToken field. | [optional] + **sort_by** | **str**| Sorting order in form of \"field_name\", \"field_name asc\" or \"field_name desc\". Ascending by default. | [optional] + **filter** | **str**| A url-encoded, JSON-serialized filter protocol buffer (see [filter.proto](https://github.com/kubeflow/artifacts/blob/master/backend/api/filter.proto)). | [optional] + +### Return type + +[**V2beta1ListArtifactResponse**](V2beta1ListArtifactResponse.md) + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. 
| - | +**0** | An unexpected error response. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/backend/api/v2beta1/python_http_client/docs/InputOutputsIOArtifact.md b/backend/api/v2beta1/python_http_client/docs/InputOutputsIOArtifact.md new file mode 100644 index 00000000000..b669fd7eb3d --- /dev/null +++ b/backend/api/v2beta1/python_http_client/docs/InputOutputsIOArtifact.md @@ -0,0 +1,13 @@ +# InputOutputsIOArtifact + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**artifacts** | [**list[V2beta1Artifact]**](V2beta1Artifact.md) | | [optional] +**type** | [**V2beta1IOType**](V2beta1IOType.md) | | [optional] +**artifact_key** | **str** | | [optional] +**producer** | [**V2beta1IOProducer**](V2beta1IOProducer.md) | | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/v2beta1/python_http_client/docs/InputOutputsIOParameter.md b/backend/api/v2beta1/python_http_client/docs/InputOutputsIOParameter.md new file mode 100644 index 00000000000..5ef901b79cd --- /dev/null +++ b/backend/api/v2beta1/python_http_client/docs/InputOutputsIOParameter.md @@ -0,0 +1,13 @@ +# InputOutputsIOParameter + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**value** | [**object**](.md) | | [optional] +**type** | [**V2beta1IOType**](V2beta1IOType.md) | | [optional] +**parameter_key** | **str** | | [optional] +**producer** | [**V2beta1IOProducer**](V2beta1IOProducer.md) | | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/v2beta1/python_http_client/docs/PipelineTaskDetailChildTask.md b/backend/api/v2beta1/python_http_client/docs/PipelineTaskDetailChildTask.md index 3c030e346bb..f84404d2279 100644 --- a/backend/api/v2beta1/python_http_client/docs/PipelineTaskDetailChildTask.md +++ b/backend/api/v2beta1/python_http_client/docs/PipelineTaskDetailChildTask.md @@ -5,7 +5,7 @@ A dependent task that requires this one to succeed. Represented by either task_i Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **task_id** | **str** | System-generated ID of a task. | [optional] -**pod_name** | **str** | Name of the corresponding pod assigned by the orchestration engine. Also known as node_id. 
| [optional] +**name** | **str** | | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/backend/api/v2beta1/python_http_client/docs/PipelineTaskDetailInputOutputs.md b/backend/api/v2beta1/python_http_client/docs/PipelineTaskDetailInputOutputs.md new file mode 100644 index 00000000000..fa5f7ac8344 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/docs/PipelineTaskDetailInputOutputs.md @@ -0,0 +1,11 @@ +# PipelineTaskDetailInputOutputs + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**parameters** | [**list[InputOutputsIOParameter]**](InputOutputsIOParameter.md) | | [optional] +**artifacts** | [**list[InputOutputsIOArtifact]**](InputOutputsIOArtifact.md) | Output only. To create artifacts for a task, use ArtifactTasks to link artifacts to tasks. | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/v2beta1/python_http_client/docs/PipelineTaskDetailStatusMetadata.md b/backend/api/v2beta1/python_http_client/docs/PipelineTaskDetailStatusMetadata.md new file mode 100644 index 00000000000..ad9df35a19d --- /dev/null +++ b/backend/api/v2beta1/python_http_client/docs/PipelineTaskDetailStatusMetadata.md @@ -0,0 +1,11 @@ +# PipelineTaskDetailStatusMetadata + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**message** | **str** | The KFP backend will populate this field with error messages if any are available on a failed task. | [optional] +**custom_properties** | **dict(str, object)** | Custom status metadata; this can be used to provide additional status info for a given task during runtime. This is currently not utilized by the KFP backend.
| [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/v2beta1/python_http_client/docs/PipelineTaskDetailTaskPod.md b/backend/api/v2beta1/python_http_client/docs/PipelineTaskDetailTaskPod.md new file mode 100644 index 00000000000..9bebc44bfbb --- /dev/null +++ b/backend/api/v2beta1/python_http_client/docs/PipelineTaskDetailTaskPod.md @@ -0,0 +1,12 @@ +# PipelineTaskDetailTaskPod + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | | [optional] +**uid** | **str** | | [optional] +**type** | [**PipelineTaskDetailTaskPodType**](PipelineTaskDetailTaskPodType.md) | | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/v2beta1/python_http_client/docs/PipelineTaskDetailTaskPodType.md b/backend/api/v2beta1/python_http_client/docs/PipelineTaskDetailTaskPodType.md new file mode 100644 index 00000000000..e88292e9a42 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/docs/PipelineTaskDetailTaskPodType.md @@ -0,0 +1,9 @@ +# PipelineTaskDetailTaskPodType + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/v2beta1/python_http_client/docs/PipelineTaskDetailTaskState.md b/backend/api/v2beta1/python_http_client/docs/PipelineTaskDetailTaskState.md new file mode 100644 index 00000000000..414cc791eb7 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/docs/PipelineTaskDetailTaskState.md @@ -0,0 +1,10 @@ +# PipelineTaskDetailTaskState + +- RUNTIME_STATE_UNSPECIFIED: Default value. This value is not used. - RUNNING: Entity execution is in progress. - SUCCEEDED: Entity completed successfully. - SKIPPED: Entity has been skipped. For example, due to caching. - FAILED: Entity execution has failed. +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/v2beta1/python_http_client/docs/PipelineTaskDetailTaskStatus.md b/backend/api/v2beta1/python_http_client/docs/PipelineTaskDetailTaskStatus.md new file mode 100644 index 00000000000..e1271a83419 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/docs/PipelineTaskDetailTaskStatus.md @@ -0,0 +1,13 @@ +# PipelineTaskDetailTaskStatus + +Timestamped representation of a Task state with an optional error. 
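+
+For orientation, values of this type typically appear in a task's `state_history` field (see V2beta1PipelineTaskDetail). A minimal, hypothetical sketch of reading them, assuming a configured `api_client` and an existing task ID:
+
+```python
+import kfp_server_api
+
+api = kfp_server_api.RunServiceApi(api_client)
+task = api.get_task('some-task-id')  # placeholder task ID
+# Each entry records when the task entered a state, plus an optional error.
+for status in task.state_history or []:
+    print(status.update_time, status.state, status.error)
+```
+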
+## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**update_time** | **datetime** | | [optional] +**state** | [**PipelineTaskDetailTaskState**](PipelineTaskDetailTaskState.md) | | [optional] +**error** | [**GooglerpcStatus**](GooglerpcStatus.md) | | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/v2beta1/python_http_client/docs/PipelineTaskDetailTaskType.md b/backend/api/v2beta1/python_http_client/docs/PipelineTaskDetailTaskType.md new file mode 100644 index 00000000000..49fe4452a5a --- /dev/null +++ b/backend/api/v2beta1/python_http_client/docs/PipelineTaskDetailTaskType.md @@ -0,0 +1,10 @@ +# PipelineTaskDetailTaskType + + - ROOT: Root task is the top ancestor task to all tasks in the pipeline run. It is the only task with no parent task in a Pipeline Run. - RUNTIME: All child tasks in the Run DAG are Runtime tasks, with the exception of K8S driver pods. These tasks are the only tasks that have Executor Pods. - CONDITION_BRANCH: Condition Branch is the wrapper task of an If block. - CONDITION: Condition is an individual \"if\" branch, and is a child to a CONDITION_BRANCH task. - LOOP: Task Group for RUNTIME Loop Iterations (analogous to a CONDITION_BRANCH Task Group). - DAG: Generic DAG task type for types like Nested Pipelines where there is no declarative way to detect this within a driver. +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/v2beta1/python_http_client/docs/PipelineTaskDetailTypeAttributes.md b/backend/api/v2beta1/python_http_client/docs/PipelineTaskDetailTypeAttributes.md new file mode 100644 index 00000000000..f6ae1bf1a3e --- /dev/null +++ b/backend/api/v2beta1/python_http_client/docs/PipelineTaskDetailTypeAttributes.md @@ -0,0 +1,11 @@ +# PipelineTaskDetailTypeAttributes + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**iteration_index** | **str** | | [optional] +**iteration_count** | **str** | | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/v2beta1/python_http_client/docs/RunServiceApi.md b/backend/api/v2beta1/python_http_client/docs/RunServiceApi.md index de6ff6df7ed..135e7d60916 100644 --- a/backend/api/v2beta1/python_http_client/docs/RunServiceApi.md +++ b/backend/api/v2beta1/python_http_client/docs/RunServiceApi.md @@ -4,6 +4,10 @@ All URIs are relative to *http://localhost* Method | HTTP request | Description ------------- | ------------- | ------------- +[**batch_update_tasks**](RunServiceApi.md#batch_update_tasks) | **POST** /apis/v2beta1/tasks:batchUpdate | Updates multiple tasks in bulk. +[**create_task**](RunServiceApi.md#create_task) | **POST** /apis/v2beta1/tasks | Creates a new task. +[**get_task**](RunServiceApi.md#get_task) | **GET** /apis/v2beta1/tasks/{task_id} | Gets a specific task by ID. +[**list_tasks**](RunServiceApi.md#list_tasks) | **GET** /apis/v2beta1/tasks | Lists tasks with optional filtering.
[**run_service_archive_run**](RunServiceApi.md#run_service_archive_run) | **POST** /apis/v2beta1/runs/{run_id}:archive | Archives a run in an experiment given by run ID and experiment ID. [**run_service_create_run**](RunServiceApi.md#run_service_create_run) | **POST** /apis/v2beta1/runs | Creates a new run in an experiment specified by experiment ID. If experiment ID is not specified, the run is created in the default experiment. [**run_service_delete_run**](RunServiceApi.md#run_service_delete_run) | **DELETE** /apis/v2beta1/runs/{run_id} | Deletes a run in an experiment given by run ID and experiment ID. @@ -13,8 +17,325 @@ Method | HTTP request | Description [**run_service_retry_run**](RunServiceApi.md#run_service_retry_run) | **POST** /apis/v2beta1/runs/{run_id}:retry | Re-initiates a failed or terminated run. [**run_service_terminate_run**](RunServiceApi.md#run_service_terminate_run) | **POST** /apis/v2beta1/runs/{run_id}:terminate | Terminates an active run. [**run_service_unarchive_run**](RunServiceApi.md#run_service_unarchive_run) | **POST** /apis/v2beta1/runs/{run_id}:unarchive | Restores an archived run in an experiment given by run ID and experiment ID. +[**update_task**](RunServiceApi.md#update_task) | **PATCH** /apis/v2beta1/tasks/{task_id} | Updates an existing task. +# **batch_update_tasks** +> V2beta1UpdateTasksBulkResponse batch_update_tasks(body) + +Updates multiple tasks in bulk. + +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.RunServiceApi(api_client) + body = kfp_server_api.V2beta1UpdateTasksBulkRequest() # V2beta1UpdateTasksBulkRequest | + + try: + # Updates multiple tasks in bulk. 
+ api_response = api_instance.batch_update_tasks(body) + pprint(api_response) + except ApiException as e: + print("Exception when calling RunServiceApi->batch_update_tasks: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **body** | [**V2beta1UpdateTasksBulkRequest**](V2beta1UpdateTasksBulkRequest.md)| | + +### Return type + +[**V2beta1UpdateTasksBulkResponse**](V2beta1UpdateTasksBulkResponse.md) + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. | - | +**0** | An unexpected error response. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **create_task** +> V2beta1PipelineTaskDetail create_task(task) + +Creates a new task. + +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.RunServiceApi(api_client) + task = kfp_server_api.V2beta1PipelineTaskDetail() # V2beta1PipelineTaskDetail | + + try: + # Creates a new task. + api_response = api_instance.create_task(task) + pprint(api_response) + except ApiException as e: + print("Exception when calling RunServiceApi->create_task: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **task** | [**V2beta1PipelineTaskDetail**](V2beta1PipelineTaskDetail.md)| | + +### Return type + +[**V2beta1PipelineTaskDetail**](V2beta1PipelineTaskDetail.md) + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. | - | +**0** | An unexpected error response. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_task** +> V2beta1PipelineTaskDetail get_task(task_id) + +Gets a specific task by ID. 
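+
+Task IDs are usually discovered first, for example by listing the tasks of a run. A minimal, hypothetical sketch, assuming a configured `api_client` and an existing run ID; `list_tasks` and the `tasks` response field are documented further down this page:
+
+```python
+import kfp_server_api
+
+api = kfp_server_api.RunServiceApi(api_client)
+# List the tasks that belong to one run, then fetch the first one in full.
+tasks = api.list_tasks(run_id='some-run-id').tasks or []  # placeholder run ID
+if tasks:
+    detail = api.get_task(tasks[0].task_id)
+    print(detail.display_name, detail.state)
+```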
+ +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.RunServiceApi(api_client) + task_id = 'task_id_example' # str | + + try: + # Gets a specific task by ID. + api_response = api_instance.get_task(task_id) + pprint(api_response) + except ApiException as e: + print("Exception when calling RunServiceApi->get_task: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **task_id** | **str**| | + +### Return type + +[**V2beta1PipelineTaskDetail**](V2beta1PipelineTaskDetail.md) + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. | - | +**0** | An unexpected error response. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **list_tasks** +> V2beta1ListTasksResponse list_tasks(parent_id=parent_id, run_id=run_id, namespace=namespace, page_size=page_size, page_token=page_token, filter=filter, order_by=order_by) + +Lists tasks with optional filtering. + +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. 
Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.RunServiceApi(api_client) + parent_id = 'parent_id_example' # str | List all tasks with this parent task. (optional) +run_id = 'run_id_example' # str | List all tasks for this run. (optional) +namespace = 'namespace_example' # str | List all tasks in this namespace. The primary use case for this filter is to detect cache hits. (optional) +page_size = 56 # int | (optional) +page_token = 'page_token_example' # str | (optional) +filter = 'filter_example' # str | (optional) +order_by = 'order_by_example' # str | (optional) + + try: + # Lists tasks with optional filtering. + api_response = api_instance.list_tasks(parent_id=parent_id, run_id=run_id, namespace=namespace, page_size=page_size, page_token=page_token, filter=filter, order_by=order_by) + pprint(api_response) + except ApiException as e: + print("Exception when calling RunServiceApi->list_tasks: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **parent_id** | **str**| List all tasks with this parent task. | [optional] + **run_id** | **str**| List all tasks for this run. | [optional] + **namespace** | **str**| List all tasks in this namespace. The primary use case for this filter is to detect cache hits. | [optional] + **page_size** | **int**| | [optional] + **page_token** | **str**| | [optional] + **filter** | **str**| | [optional] + **order_by** | **str**| | [optional] + +### Return type + +[**V2beta1ListTasksResponse**](V2beta1ListTasksResponse.md) + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. | - | +**0** | An unexpected error response. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + # **run_service_archive_run** > object run_service_archive_run(run_id, experiment_id=experiment_id) @@ -250,7 +571,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **run_service_get_run** -> V2beta1Run run_service_get_run(run_id, experiment_id=experiment_id) +> V2beta1Run run_service_get_run(run_id, experiment_id=experiment_id, view=view) Finds a specific run by ID. @@ -290,10 +611,11 @@ with kfp_server_api.ApiClient(configuration) as api_client: api_instance = kfp_server_api.RunServiceApi(api_client) run_id = 'run_id_example' # str | The ID of the run to be retrieved. experiment_id = 'experiment_id_example' # str | The ID of the parent experiment. (optional) +view = 'DEFAULT' # str | Optional view mode. This field can be used to adjust how detailed the Run object that is returned will be. - DEFAULT: By default `tasks` field is omitted. This provides a faster and leaner run object. - FULL: This view mode displays all the tasks for this run with all its fields populated. 
(optional) (default to 'DEFAULT') try: # Finds a specific run by ID. - api_response = api_instance.run_service_get_run(run_id, experiment_id=experiment_id) + api_response = api_instance.run_service_get_run(run_id, experiment_id=experiment_id, view=view) pprint(api_response) except ApiException as e: print("Exception when calling RunServiceApi->run_service_get_run: %s\n" % e) @@ -305,6 +627,7 @@ Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- **run_id** | **str**| The ID of the run to be retrieved. | **experiment_id** | **str**| The ID of the parent experiment. | [optional] + **view** | **str**| Optional view mode. This field can be used to adjust how detailed the Run object that is returned will be. - DEFAULT: By default `tasks` field is omitted. This provides a faster and leaner run object. - FULL: This view mode displays all the tasks for this run with all its fields populated. | [optional] [default to 'DEFAULT'] ### Return type @@ -328,7 +651,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **run_service_list_runs** -> V2beta1ListRunsResponse run_service_list_runs(namespace=namespace, experiment_id=experiment_id, page_token=page_token, page_size=page_size, sort_by=sort_by, filter=filter) +> V2beta1ListRunsResponse run_service_list_runs(namespace=namespace, experiment_id=experiment_id, page_token=page_token, page_size=page_size, sort_by=sort_by, filter=filter, view=view) Finds all runs in an experiment given by experiment ID. If experiment id is not specified, finds all runs across all experiments. @@ -372,10 +695,11 @@ page_token = 'page_token_example' # str | A page token to request the next page page_size = 56 # int | The number of runs to be listed per page. If there are more runs than this number, the response message will contain a nextPageToken field you can use to fetch the next page. (optional) sort_by = 'sort_by_example' # str | Can be format of \"field_name\", \"field_name asc\" or \"field_name desc\" (Example, \"name asc\" or \"id desc\"). Ascending by default. (optional) filter = 'filter_example' # str | A url-encoded, JSON-serialized Filter protocol buffer (see [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/filter.proto)). (optional) +view = 'DEFAULT' # str | Optional view mode. This field can be used to adjust how detailed the Run object that is returned will be. - DEFAULT: By default `tasks` field is omitted. This provides a faster and leaner run object. - FULL: This view mode displays all the tasks for this run with all its fields populated. (optional) (default to 'DEFAULT') try: # Finds all runs in an experiment given by experiment ID. If experiment id is not specified, finds all runs across all experiments. - api_response = api_instance.run_service_list_runs(namespace=namespace, experiment_id=experiment_id, page_token=page_token, page_size=page_size, sort_by=sort_by, filter=filter) + api_response = api_instance.run_service_list_runs(namespace=namespace, experiment_id=experiment_id, page_token=page_token, page_size=page_size, sort_by=sort_by, filter=filter, view=view) pprint(api_response) except ApiException as e: print("Exception when calling RunServiceApi->run_service_list_runs: %s\n" % e) @@ -391,6 +715,7 @@ Name | Type | Description | Notes **page_size** | **int**| The number of runs to be listed per page. 
If there are more runs than this number, the response message will contain a nextPageToken field you can use to fetch the next page. | [optional] **sort_by** | **str**| Can be format of \"field_name\", \"field_name asc\" or \"field_name desc\" (Example, \"name asc\" or \"id desc\"). Ascending by default. | [optional] **filter** | **str**| A url-encoded, JSON-serialized Filter protocol buffer (see [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/filter.proto)). | [optional] + **view** | **str**| Optional view mode. This field can be used to adjust how detailed the Run object that is returned will be. - DEFAULT: By default `tasks` field is omitted. This provides a faster and leaner run object. - FULL: This view mode displays all the tasks for this run with all its fields populated. | [optional] [default to 'DEFAULT'] ### Return type @@ -729,3 +1054,81 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) +# **update_task** +> V2beta1PipelineTaskDetail update_task(task_id, task) + +Updates an existing task. + +### Example + +* Api Key Authentication (Bearer): +```python +from __future__ import print_function +import time +import kfp_server_api +from kfp_server_api.rest import ApiException +from pprint import pprint +# Defining the host is optional and defaults to http://localhost +# See configuration.py for a list of all supported configuration parameters. +configuration = kfp_server_api.Configuration( + host = "http://localhost" +) + +# The client must configure the authentication and authorization parameters +# in accordance with the API server security policy. +# Examples for each auth method are provided below, use the example that +# satisfies your auth use case. + +# Configure API key authorization: Bearer +configuration = kfp_server_api.Configuration( + host = "http://localhost", + api_key = { + 'authorization': 'YOUR_API_KEY' + } +) +# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed +# configuration.api_key_prefix['authorization'] = 'Bearer' + +# Enter a context with an instance of the API client +with kfp_server_api.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = kfp_server_api.RunServiceApi(api_client) + task_id = 'task_id_example' # str | +task = kfp_server_api.V2beta1PipelineTaskDetail() # V2beta1PipelineTaskDetail | + + try: + # Updates an existing task. + api_response = api_instance.update_task(task_id, task) + pprint(api_response) + except ApiException as e: + print("Exception when calling RunServiceApi->update_task: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **task_id** | **str**| | + **task** | [**V2beta1PipelineTaskDetail**](V2beta1PipelineTaskDetail.md)| | + +### Return type + +[**V2beta1PipelineTaskDetail**](V2beta1PipelineTaskDetail.md) + +### Authorization + +[Bearer](../README.md#Bearer) + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | A successful response. | - | +**0** | An unexpected error response. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/backend/api/v2beta1/python_http_client/docs/V2beta1Artifact.md b/backend/api/v2beta1/python_http_client/docs/V2beta1Artifact.md new file mode 100644 index 00000000000..698604cb1bb --- /dev/null +++ b/backend/api/v2beta1/python_http_client/docs/V2beta1Artifact.md @@ -0,0 +1,18 @@ +# V2beta1Artifact + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**artifact_id** | **str** | | [optional] [readonly] +**name** | **str** | Required. The client-provided name of the artifact. Note: in MLMD, when name was set, it had to be unique for that type_id; this restriction is removed here. If this is a \"Metric\" artifact, the name of the metric is treated as the Key in its K/V pair. | [optional] +**description** | **str** | | [optional] +**type** | [**ArtifactArtifactType**](ArtifactArtifactType.md) | | [optional] +**uri** | **str** | The uniform resource identifier of the physical artifact. May be empty if there is no physical artifact. | [optional] +**metadata** | **dict(str, object)** | Optional. User-provided custom properties which are not defined by its type. | [optional] +**number_value** | **float** | | [optional] +**created_at** | **datetime** | Output only. Create time of the artifact in milliseconds since epoch. Note: the type and name are updated from the MLMD artifact to be consistent with other backend APIs. | [optional] [readonly] +**namespace** | **str** | | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/v2beta1/python_http_client/docs/V2beta1ArtifactTask.md b/backend/api/v2beta1/python_http_client/docs/V2beta1ArtifactTask.md new file mode 100644 index 00000000000..ef63cfe864d --- /dev/null +++ b/backend/api/v2beta1/python_http_client/docs/V2beta1ArtifactTask.md @@ -0,0 +1,16 @@ +# V2beta1ArtifactTask + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | Output only. The unique server-generated ID of the ArtifactTask. | [optional] [readonly] +**artifact_id** | **str** | | [optional] +**run_id** | **str** | | [optional] +**task_id** | **str** | | [optional] +**type** | [**V2beta1IOType**](V2beta1IOType.md) | | [optional] +**producer** | [**V2beta1IOProducer**](V2beta1IOProducer.md) | | [optional] +**key** | **str** | | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/v2beta1/python_http_client/docs/V2beta1CreateArtifactRequest.md b/backend/api/v2beta1/python_http_client/docs/V2beta1CreateArtifactRequest.md new file mode 100644 index 00000000000..6e16b3e0b84 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/docs/V2beta1CreateArtifactRequest.md @@ -0,0 +1,15 @@ +# V2beta1CreateArtifactRequest + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**artifact** | [**V2beta1Artifact**](V2beta1Artifact.md) | | [optional] +**run_id** | **str** | An artifact is always created in the context of a run.
| [optional] +**task_id** | **str** | The Task that is associated with the creation of this artifact. | [optional] +**producer_key** | **str** | | [optional] +**iteration_index** | **str** | | [optional] +**type** | [**V2beta1IOType**](V2beta1IOType.md) | | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/v2beta1/python_http_client/docs/V2beta1CreateArtifactTaskRequest.md b/backend/api/v2beta1/python_http_client/docs/V2beta1CreateArtifactTaskRequest.md new file mode 100644 index 00000000000..e4c2e3bb3f9 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/docs/V2beta1CreateArtifactTaskRequest.md @@ -0,0 +1,10 @@ +# V2beta1CreateArtifactTaskRequest + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**artifact_task** | [**V2beta1ArtifactTask**](V2beta1ArtifactTask.md) | | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/v2beta1/python_http_client/docs/V2beta1CreateArtifactTasksBulkRequest.md b/backend/api/v2beta1/python_http_client/docs/V2beta1CreateArtifactTasksBulkRequest.md new file mode 100644 index 00000000000..cf65a100984 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/docs/V2beta1CreateArtifactTasksBulkRequest.md @@ -0,0 +1,10 @@ +# V2beta1CreateArtifactTasksBulkRequest + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**artifact_tasks** | [**list[V2beta1ArtifactTask]**](V2beta1ArtifactTask.md) | Required. The list of artifact-task relationships to create. | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/v2beta1/python_http_client/docs/V2beta1CreateArtifactTasksBulkResponse.md b/backend/api/v2beta1/python_http_client/docs/V2beta1CreateArtifactTasksBulkResponse.md new file mode 100644 index 00000000000..7455094781f --- /dev/null +++ b/backend/api/v2beta1/python_http_client/docs/V2beta1CreateArtifactTasksBulkResponse.md @@ -0,0 +1,10 @@ +# V2beta1CreateArtifactTasksBulkResponse + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**artifact_tasks** | [**list[V2beta1ArtifactTask]**](V2beta1ArtifactTask.md) | The list of created artifact-task relationships. | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/v2beta1/python_http_client/docs/V2beta1CreateArtifactsBulkRequest.md b/backend/api/v2beta1/python_http_client/docs/V2beta1CreateArtifactsBulkRequest.md new file mode 100644 index 00000000000..a4c948d7525 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/docs/V2beta1CreateArtifactsBulkRequest.md @@ -0,0 +1,10 @@ +# V2beta1CreateArtifactsBulkRequest + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**artifacts** | [**list[V2beta1CreateArtifactRequest]**](V2beta1CreateArtifactRequest.md) | Required. The list of artifacts to create. 
| [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/v2beta1/python_http_client/docs/V2beta1CreateArtifactsBulkResponse.md b/backend/api/v2beta1/python_http_client/docs/V2beta1CreateArtifactsBulkResponse.md new file mode 100644 index 00000000000..ddc1004e8a9 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/docs/V2beta1CreateArtifactsBulkResponse.md @@ -0,0 +1,10 @@ +# V2beta1CreateArtifactsBulkResponse + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**artifacts** | [**list[V2beta1Artifact]**](V2beta1Artifact.md) | The list of created artifacts. | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/v2beta1/python_http_client/docs/V2beta1GetRunRequestViewMode.md b/backend/api/v2beta1/python_http_client/docs/V2beta1GetRunRequestViewMode.md new file mode 100644 index 00000000000..eb28713af9b --- /dev/null +++ b/backend/api/v2beta1/python_http_client/docs/V2beta1GetRunRequestViewMode.md @@ -0,0 +1,10 @@ +# V2beta1GetRunRequestViewMode + + - DEFAULT: By default `tasks` field is omitted. This provides a faster and leaner run object. - FULL: This view mode displays all the tasks for this run with all its fields populated. +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/v2beta1/python_http_client/docs/V2beta1IOProducer.md b/backend/api/v2beta1/python_http_client/docs/V2beta1IOProducer.md new file mode 100644 index 00000000000..b9c28434ac7 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/docs/V2beta1IOProducer.md @@ -0,0 +1,11 @@ +# V2beta1IOProducer + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**task_name** | **str** | | [optional] +**iteration** | **str** | | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/v2beta1/python_http_client/docs/V2beta1IOType.md b/backend/api/v2beta1/python_http_client/docs/V2beta1IOType.md new file mode 100644 index 00000000000..dd324291e93 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/docs/V2beta1IOType.md @@ -0,0 +1,10 @@ +# V2beta1IOType + +Describes the I/O relationship between Artifacts/Parameters and Tasks. There are a couple of instances where input/outputs have special types such as in the case of LoopArguments or dsl.Collected outputs. - UNSPECIFIED: For validation - COMPONENT_DEFAULT_INPUT: This is used for inputs that are provided via default parameters in the component input definitions - TASK_OUTPUT_INPUT: This is used for inputs that are provided via upstream tasks. In the sdk this appears as: TaskInputsSpec.kind.task_output_parameter & TaskInputsSpec.kind.task_output_artifact - COMPONENT_INPUT: Used for inputs that are passed from parent tasks. - RUNTIME_VALUE_INPUT: Hardcoded values passed as arguments to the task. 
- COLLECTED_INPUTS: Used for dsl.Collected. Usage of this type indicates that all Artifacts within the IOArtifact.artifacts are inputs collected from sub tasks with ITERATOR_OUTPUT outputs. - ITERATOR_INPUT: In a for loop task, introduced via ParallelFor, this type is used to indicate whether this resolved input belongs to a parameterIterator or artifactIterator. In such a case the \"artifacts\" field for IOArtifact.artifacts is the list of resolved items for this parallelFor. - ITERATOR_INPUT_RAW: Hardcoded iterator parameters. Raw Iterator inputs have no producer. - ITERATOR_OUTPUT: When an output is produced by a Runtime Iteration Task. This value is used to differentiate between standard inputs. - OUTPUT: All other output types fall under this type. - ONE_OF_OUTPUT: An output of a Conditions branch. +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/v2beta1/python_http_client/docs/V2beta1ListArtifactResponse.md b/backend/api/v2beta1/python_http_client/docs/V2beta1ListArtifactResponse.md new file mode 100644 index 00000000000..8b8b97177f1 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/docs/V2beta1ListArtifactResponse.md @@ -0,0 +1,12 @@ +# V2beta1ListArtifactResponse + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**artifacts** | [**list[V2beta1Artifact]**](V2beta1Artifact.md) | The list of artifacts returned. | [optional] +**total_size** | **int** | The total number of artifacts available. This field is not always populated. | [optional] +**next_page_token** | **str** | A token to retrieve the next page of results, or empty if there are no more results in the list. | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/v2beta1/python_http_client/docs/V2beta1ListArtifactTasksResponse.md b/backend/api/v2beta1/python_http_client/docs/V2beta1ListArtifactTasksResponse.md new file mode 100644 index 00000000000..d895723b7c8 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/docs/V2beta1ListArtifactTasksResponse.md @@ -0,0 +1,12 @@ +# V2beta1ListArtifactTasksResponse + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**artifact_tasks** | [**list[V2beta1ArtifactTask]**](V2beta1ArtifactTask.md) | | [optional] +**total_size** | **int** | | [optional] +**next_page_token** | **str** | | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/v2beta1/python_http_client/docs/V2beta1ListRunsRequestViewMode.md b/backend/api/v2beta1/python_http_client/docs/V2beta1ListRunsRequestViewMode.md new file mode 100644 index 00000000000..c2cfa30980c --- /dev/null +++ b/backend/api/v2beta1/python_http_client/docs/V2beta1ListRunsRequestViewMode.md @@ -0,0 +1,10 @@ +# V2beta1ListRunsRequestViewMode + + - DEFAULT: By default `tasks` field is omitted. This provides a faster and leaner run object. - FULL: This view mode displays all the tasks for this run with all its fields populated.
+## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/v2beta1/python_http_client/docs/V2beta1ListTasksResponse.md b/backend/api/v2beta1/python_http_client/docs/V2beta1ListTasksResponse.md new file mode 100644 index 00000000000..ed996a81721 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/docs/V2beta1ListTasksResponse.md @@ -0,0 +1,12 @@ +# V2beta1ListTasksResponse + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**tasks** | [**list[V2beta1PipelineTaskDetail]**](V2beta1PipelineTaskDetail.md) | | [optional] +**next_page_token** | **str** | | [optional] +**total_size** | **int** | | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/v2beta1/python_http_client/docs/V2beta1PipelineTaskDetail.md b/backend/api/v2beta1/python_http_client/docs/V2beta1PipelineTaskDetail.md index 3b197b439e2..1a4feb097a1 100644 --- a/backend/api/v2beta1/python_http_client/docs/V2beta1PipelineTaskDetail.md +++ b/backend/api/v2beta1/python_http_client/docs/V2beta1PipelineTaskDetail.md @@ -4,22 +4,26 @@ Runtime information of a task execution. ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**run_id** | **str** | ID of the parent run. | [optional] -**task_id** | **str** | System-generated ID of a task. | [optional] +**name** | **str** | | [optional] **display_name** | **str** | User specified name of a task that is defined in [Pipeline.spec][]. | [optional] +**task_id** | **str** | System-generated ID of a task. | [optional] +**run_id** | **str** | ID of the parent run. | [optional] +**pods** | [**list[PipelineTaskDetailTaskPod]**](PipelineTaskDetailTaskPod.md) | | [optional] +**cache_fingerprint** | **str** | | [optional] **create_time** | **datetime** | Creation time of a task. | [optional] **start_time** | **datetime** | Starting time of a task. | [optional] **end_time** | **datetime** | Completion time of a task. | [optional] -**executor_detail** | [**V2beta1PipelineTaskExecutorDetail**](V2beta1PipelineTaskExecutorDetail.md) | | [optional] -**state** | [**V2beta1RuntimeState**](V2beta1RuntimeState.md) | | [optional] -**execution_id** | **str** | Execution id of the corresponding entry in ML metadata store. | [optional] +**state** | [**PipelineTaskDetailTaskState**](PipelineTaskDetailTaskState.md) | | [optional] +**status_metadata** | [**PipelineTaskDetailStatusMetadata**](PipelineTaskDetailStatusMetadata.md) | | [optional] +**state_history** | [**list[PipelineTaskDetailTaskStatus]**](PipelineTaskDetailTaskStatus.md) | A sequence of task statuses. This field keeps a record of state transitions. | [optional] +**type** | [**PipelineTaskDetailTaskType**](PipelineTaskDetailTaskType.md) | | [optional] +**type_attributes** | [**PipelineTaskDetailTypeAttributes**](PipelineTaskDetailTypeAttributes.md) | | [optional] **error** | [**GooglerpcStatus**](GooglerpcStatus.md) | | [optional] -**inputs** | [**dict(str, V2beta1ArtifactList)**](V2beta1ArtifactList.md) | Input artifacts of the task. 
| [optional] -**outputs** | [**dict(str, V2beta1ArtifactList)**](V2beta1ArtifactList.md) | Output artifacts of the task. | [optional] **parent_task_id** | **str** | ID of the parent task if the task is within a component scope. Empty if the task is at the root level. | [optional] -**state_history** | [**list[V2beta1RuntimeStatus]**](V2beta1RuntimeStatus.md) | A sequence of task statuses. This field keeps a record of state transitions. | [optional] -**pod_name** | **str** | Name of the corresponding pod assigned by the orchestration engine. Also known as node_id. | [optional] -**child_tasks** | [**list[PipelineTaskDetailChildTask]**](PipelineTaskDetailChildTask.md) | Sequence of dependen tasks. | [optional] +**child_tasks** | [**list[PipelineTaskDetailChildTask]**](PipelineTaskDetailChildTask.md) | Sequence of dependent tasks. | [optional] +**inputs** | [**PipelineTaskDetailInputOutputs**](PipelineTaskDetailInputOutputs.md) | | [optional] +**outputs** | [**PipelineTaskDetailInputOutputs**](PipelineTaskDetailInputOutputs.md) | | [optional] +**scope_path** | **list[str]** | The scope of this task within the pipeline spec. Each entry represents either a Dag Task or a Container task. Note that a Container task is always the last entry in a scope_path. | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/backend/api/v2beta1/python_http_client/docs/V2beta1PipelineTaskExecutorDetail.md b/backend/api/v2beta1/python_http_client/docs/V2beta1PipelineTaskExecutorDetail.md deleted file mode 100644 index d09a761c943..00000000000 --- a/backend/api/v2beta1/python_http_client/docs/V2beta1PipelineTaskExecutorDetail.md +++ /dev/null @@ -1,14 +0,0 @@ -# V2beta1PipelineTaskExecutorDetail - -Runtime information of a pipeline task executor. -## Properties -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**main_job** | **str** | The name of the job for the main container execution. | [optional] -**pre_caching_check_job** | **str** | The name of the job for the pre-caching-check container execution. This job will be available if the Run.pipeline_spec specifies the `pre_caching_check` hook in the lifecycle events. | [optional] -**failed_main_jobs** | **list[str]** | The names of the previously failed job for the main container executions. The list includes the all attempts in chronological order. | [optional] -**failed_pre_caching_check_jobs** | **list[str]** | The names of the previously failed job for the pre-caching-check container executions. This job will be available if the Run.pipeline_spec specifies the `pre_caching_check` hook in the lifecycle events. The list includes the all attempts in chronological order. | [optional] - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/backend/api/v2beta1/python_http_client/docs/V2beta1Run.md b/backend/api/v2beta1/python_http_client/docs/V2beta1Run.md index dbb37fdf0fb..1b3189c6487 100644 --- a/backend/api/v2beta1/python_http_client/docs/V2beta1Run.md +++ b/backend/api/v2beta1/python_http_client/docs/V2beta1Run.md @@ -21,6 +21,9 @@ Name | Type | Description | Notes **run_details** | [**V2beta1RunDetails**](V2beta1RunDetails.md) | | [optional] **recurring_run_id** | **str** | ID of the recurring run that triggered this run.
| [optional] **state_history** | [**list[V2beta1RuntimeStatus]**](V2beta1RuntimeStatus.md) | Output. A sequence of run statuses. This field keeps a record of state transitions. | [optional] +**pipeline_reference** | [**V2beta1PipelineVersionReference**](V2beta1PipelineVersionReference.md) | | [optional] +**task_count** | **int** | | [optional] +**tasks** | [**list[V2beta1PipelineTaskDetail]**](V2beta1PipelineTaskDetail.md) | | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/backend/api/v2beta1/python_http_client/docs/V2beta1UpdateTasksBulkRequest.md b/backend/api/v2beta1/python_http_client/docs/V2beta1UpdateTasksBulkRequest.md new file mode 100644 index 00000000000..c2ba0bd715e --- /dev/null +++ b/backend/api/v2beta1/python_http_client/docs/V2beta1UpdateTasksBulkRequest.md @@ -0,0 +1,10 @@ +# V2beta1UpdateTasksBulkRequest + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**tasks** | [**dict(str, V2beta1PipelineTaskDetail)**](V2beta1PipelineTaskDetail.md) | | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/v2beta1/python_http_client/docs/V2beta1UpdateTasksBulkResponse.md b/backend/api/v2beta1/python_http_client/docs/V2beta1UpdateTasksBulkResponse.md new file mode 100644 index 00000000000..38e29ba754e --- /dev/null +++ b/backend/api/v2beta1/python_http_client/docs/V2beta1UpdateTasksBulkResponse.md @@ -0,0 +1,10 @@ +# V2beta1UpdateTasksBulkResponse + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**tasks** | [**dict(str, V2beta1PipelineTaskDetail)**](V2beta1PipelineTaskDetail.md) | | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/__init__.py b/backend/api/v2beta1/python_http_client/kfp_server_api/__init__.py index 2d8e2f632d1..3232236a325 100644 --- a/backend/api/v2beta1/python_http_client/kfp_server_api/__init__.py +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/__init__.py @@ -17,6 +17,7 @@ __version__ = "2.14.6" # import apis into sdk package +from kfp_server_api.api.artifact_service_api import ArtifactServiceApi from kfp_server_api.api.auth_service_api import AuthServiceApi from kfp_server_api.api.experiment_service_api import ExperimentServiceApi from kfp_server_api.api.healthz_service_api import HealthzServiceApi @@ -36,32 +37,56 @@ from kfp_server_api.exceptions import ApiKeyError from kfp_server_api.exceptions import ApiException # import models into sdk package +from kfp_server_api.models.artifact_artifact_type import ArtifactArtifactType from kfp_server_api.models.authorize_request_resources import AuthorizeRequestResources from kfp_server_api.models.authorize_request_verb import AuthorizeRequestVerb from kfp_server_api.models.googlerpc_status import GooglerpcStatus +from kfp_server_api.models.input_outputs_io_artifact import InputOutputsIOArtifact +from kfp_server_api.models.input_outputs_io_parameter import InputOutputsIOParameter from kfp_server_api.models.pipeline_task_detail_child_task import PipelineTaskDetailChildTask +from 
kfp_server_api.models.pipeline_task_detail_input_outputs import PipelineTaskDetailInputOutputs +from kfp_server_api.models.pipeline_task_detail_status_metadata import PipelineTaskDetailStatusMetadata +from kfp_server_api.models.pipeline_task_detail_task_pod import PipelineTaskDetailTaskPod +from kfp_server_api.models.pipeline_task_detail_task_pod_type import PipelineTaskDetailTaskPodType +from kfp_server_api.models.pipeline_task_detail_task_state import PipelineTaskDetailTaskState +from kfp_server_api.models.pipeline_task_detail_task_status import PipelineTaskDetailTaskStatus +from kfp_server_api.models.pipeline_task_detail_task_type import PipelineTaskDetailTaskType +from kfp_server_api.models.pipeline_task_detail_type_attributes import PipelineTaskDetailTypeAttributes from kfp_server_api.models.predicate_int_values import PredicateIntValues from kfp_server_api.models.predicate_long_values import PredicateLongValues from kfp_server_api.models.predicate_string_values import PredicateStringValues from kfp_server_api.models.protobuf_any import ProtobufAny from kfp_server_api.models.protobuf_null_value import ProtobufNullValue from kfp_server_api.models.recurring_run_mode import RecurringRunMode -from kfp_server_api.models.v2beta1_artifact_list import V2beta1ArtifactList +from kfp_server_api.models.v2beta1_artifact import V2beta1Artifact +from kfp_server_api.models.v2beta1_artifact_task import V2beta1ArtifactTask +from kfp_server_api.models.v2beta1_create_artifact_request import V2beta1CreateArtifactRequest +from kfp_server_api.models.v2beta1_create_artifact_task_request import V2beta1CreateArtifactTaskRequest +from kfp_server_api.models.v2beta1_create_artifact_tasks_bulk_request import V2beta1CreateArtifactTasksBulkRequest +from kfp_server_api.models.v2beta1_create_artifact_tasks_bulk_response import V2beta1CreateArtifactTasksBulkResponse +from kfp_server_api.models.v2beta1_create_artifacts_bulk_request import V2beta1CreateArtifactsBulkRequest +from kfp_server_api.models.v2beta1_create_artifacts_bulk_response import V2beta1CreateArtifactsBulkResponse from kfp_server_api.models.v2beta1_create_pipeline_and_version_request import V2beta1CreatePipelineAndVersionRequest from kfp_server_api.models.v2beta1_cron_schedule import V2beta1CronSchedule from kfp_server_api.models.v2beta1_experiment import V2beta1Experiment from kfp_server_api.models.v2beta1_experiment_storage_state import V2beta1ExperimentStorageState from kfp_server_api.models.v2beta1_filter import V2beta1Filter from kfp_server_api.models.v2beta1_get_healthz_response import V2beta1GetHealthzResponse +from kfp_server_api.models.v2beta1_get_run_request_view_mode import V2beta1GetRunRequestViewMode +from kfp_server_api.models.v2beta1_io_producer import V2beta1IOProducer +from kfp_server_api.models.v2beta1_io_type import V2beta1IOType +from kfp_server_api.models.v2beta1_list_artifact_response import V2beta1ListArtifactResponse +from kfp_server_api.models.v2beta1_list_artifact_tasks_response import V2beta1ListArtifactTasksResponse from kfp_server_api.models.v2beta1_list_experiments_response import V2beta1ListExperimentsResponse from kfp_server_api.models.v2beta1_list_pipeline_versions_response import V2beta1ListPipelineVersionsResponse from kfp_server_api.models.v2beta1_list_pipelines_response import V2beta1ListPipelinesResponse from kfp_server_api.models.v2beta1_list_recurring_runs_response import V2beta1ListRecurringRunsResponse +from kfp_server_api.models.v2beta1_list_runs_request_view_mode import V2beta1ListRunsRequestViewMode from 
kfp_server_api.models.v2beta1_list_runs_response import V2beta1ListRunsResponse +from kfp_server_api.models.v2beta1_list_tasks_response import V2beta1ListTasksResponse from kfp_server_api.models.v2beta1_periodic_schedule import V2beta1PeriodicSchedule from kfp_server_api.models.v2beta1_pipeline import V2beta1Pipeline from kfp_server_api.models.v2beta1_pipeline_task_detail import V2beta1PipelineTaskDetail -from kfp_server_api.models.v2beta1_pipeline_task_executor_detail import V2beta1PipelineTaskExecutorDetail from kfp_server_api.models.v2beta1_pipeline_version import V2beta1PipelineVersion from kfp_server_api.models.v2beta1_pipeline_version_reference import V2beta1PipelineVersionReference from kfp_server_api.models.v2beta1_predicate import V2beta1Predicate @@ -76,6 +101,8 @@ from kfp_server_api.models.v2beta1_runtime_state import V2beta1RuntimeState from kfp_server_api.models.v2beta1_runtime_status import V2beta1RuntimeStatus from kfp_server_api.models.v2beta1_trigger import V2beta1Trigger +from kfp_server_api.models.v2beta1_update_tasks_bulk_request import V2beta1UpdateTasksBulkRequest +from kfp_server_api.models.v2beta1_update_tasks_bulk_response import V2beta1UpdateTasksBulkResponse from kfp_server_api.models.v2beta1_url import V2beta1Url from kfp_server_api.models.v2beta1_visualization import V2beta1Visualization from kfp_server_api.models.v2beta1_visualization_type import V2beta1VisualizationType diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/api/__init__.py b/backend/api/v2beta1/python_http_client/kfp_server_api/api/__init__.py index 2c1d6567c2d..eb9745770ca 100644 --- a/backend/api/v2beta1/python_http_client/kfp_server_api/api/__init__.py +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/api/__init__.py @@ -3,6 +3,7 @@ # flake8: noqa # import apis into api package +from kfp_server_api.api.artifact_service_api import ArtifactServiceApi from kfp_server_api.api.auth_service_api import AuthServiceApi from kfp_server_api.api.experiment_service_api import ExperimentServiceApi from kfp_server_api.api.healthz_service_api import HealthzServiceApi diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/api/artifact_service_api.py b/backend/api/v2beta1/python_http_client/kfp_server_api/api/artifact_service_api.py new file mode 100644 index 00000000000..fc0bc5d6a3f --- /dev/null +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/api/artifact_service_api.py @@ -0,0 +1,979 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +# python 2 and python 3 compatibility library +import six + +from kfp_server_api.api_client import ApiClient +from kfp_server_api.exceptions import ( # noqa: F401 + ApiTypeError, + ApiValueError +) + + +class ArtifactServiceApi(object): + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None): + if api_client is None: + api_client = ApiClient() + self.api_client = api_client + + def batch_create_artifact_tasks(self, body, **kwargs): # noqa: E501 + """Creates multiple artifact-task relationships in bulk. # noqa: E501 + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.batch_create_artifact_tasks(body, async_req=True) + >>> result = thread.get() + + :param body: (required) + :type body: V2beta1CreateArtifactTasksBulkRequest + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: V2beta1CreateArtifactTasksBulkResponse + """ + kwargs['_return_http_data_only'] = True + return self.batch_create_artifact_tasks_with_http_info(body, **kwargs) # noqa: E501 + + def batch_create_artifact_tasks_with_http_info(self, body, **kwargs): # noqa: E501 + """Creates multiple artifact-task relationships in bulk. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.batch_create_artifact_tasks_with_http_info(body, async_req=True) + >>> result = thread.get() + + :param body: (required) + :type body: V2beta1CreateArtifactTasksBulkRequest + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. 
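A rough sketch of calling the new `artifact_tasks:batchCreate` endpoint through this generated client. The host is a placeholder, and the request's `artifact_tasks` field name is an assumption mirroring V2beta1ListArtifactTasksResponse, since the request model's fields are not shown in this diff; the `async_req` variant follows the docstring above.

```python
import kfp_server_api

# Placeholder host; point this at your KFP API server.
config = kfp_server_api.Configuration(host="http://localhost:8888")
api = kfp_server_api.ArtifactServiceApi(kfp_server_api.ApiClient(config))

# `artifact_tasks` is an assumed field name; the V2beta1ArtifactTask fields
# themselves are documented in its model doc, so they are left unset here.
body = kfp_server_api.V2beta1CreateArtifactTasksBulkRequest(
    artifact_tasks=[kfp_server_api.V2beta1ArtifactTask()],
)

resp = api.batch_create_artifact_tasks(body)  # synchronous by default

thread = api.batch_create_artifact_tasks(body, async_req=True)
resp = thread.get()  # async variant: join the worker thread for the result
```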
+ :rtype: tuple(V2beta1CreateArtifactTasksBulkResponse, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'body' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method batch_create_artifact_tasks" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'body' is set + if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501 + local_var_params['body'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `body` when calling `batch_create_artifact_tasks`") # noqa: E501 + + collection_formats = {} + + path_params = {} + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + if 'body' in local_var_params: + body_params = local_var_params['body'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v2beta1/artifact_tasks:batchCreate', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='V2beta1CreateArtifactTasksBulkResponse', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def batch_create_artifacts(self, body, **kwargs): # noqa: E501 + """Creates multiple artifacts in bulk. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.batch_create_artifacts(body, async_req=True) + >>> result = thread.get() + + :param body: (required) + :type body: V2beta1CreateArtifactsBulkRequest + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: V2beta1CreateArtifactsBulkResponse + """ + kwargs['_return_http_data_only'] = True + return self.batch_create_artifacts_with_http_info(body, **kwargs) # noqa: E501 + + def batch_create_artifacts_with_http_info(self, body, **kwargs): # noqa: E501 + """Creates multiple artifacts in bulk. # noqa: E501 + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.batch_create_artifacts_with_http_info(body, async_req=True) + >>> result = thread.get() + + :param body: (required) + :type body: V2beta1CreateArtifactsBulkRequest + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: tuple(V2beta1CreateArtifactsBulkResponse, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'body' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method batch_create_artifacts" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'body' is set + if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501 + local_var_params['body'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `body` when calling `batch_create_artifacts`") # noqa: E501 + + collection_formats = {} + + path_params = {} + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + if 'body' in local_var_params: + body_params = local_var_params['body'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v2beta1/artifacts:batchCreate', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='V2beta1CreateArtifactsBulkResponse', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def create_artifact(self, body, **kwargs): # noqa: E501 + """Creates a new artifact. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.create_artifact(body, async_req=True) + >>> result = thread.get() + + :param body: (required) + :type body: V2beta1CreateArtifactRequest + :param async_req: Whether to execute the request asynchronously. 
+ :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: V2beta1Artifact + """ + kwargs['_return_http_data_only'] = True + return self.create_artifact_with_http_info(body, **kwargs) # noqa: E501 + + def create_artifact_with_http_info(self, body, **kwargs): # noqa: E501 + """Creates a new artifact. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.create_artifact_with_http_info(body, async_req=True) + >>> result = thread.get() + + :param body: (required) + :type body: V2beta1CreateArtifactRequest + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: tuple(V2beta1Artifact, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'body' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method create_artifact" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'body' is set + if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501 + local_var_params['body'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `body` when calling `create_artifact`") # noqa: E501 + + collection_formats = {} + + path_params = {} + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + if 'body' in local_var_params: + body_params = local_var_params['body'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v2beta1/artifacts', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='V2beta1Artifact', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + 
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def create_artifact_task(self, body, **kwargs): # noqa: E501 + """Creates an artifact-task relationship. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.create_artifact_task(body, async_req=True) + >>> result = thread.get() + + :param body: (required) + :type body: V2beta1CreateArtifactTaskRequest + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: V2beta1ArtifactTask + """ + kwargs['_return_http_data_only'] = True + return self.create_artifact_task_with_http_info(body, **kwargs) # noqa: E501 + + def create_artifact_task_with_http_info(self, body, **kwargs): # noqa: E501 + """Creates an artifact-task relationship. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.create_artifact_task_with_http_info(body, async_req=True) + >>> result = thread.get() + + :param body: (required) + :type body: V2beta1CreateArtifactTaskRequest + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. 
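For the single-object endpoints, a minimal sketch under the same assumptions: the host is a placeholder, and because neither request model's fields appear in this diff, both constructors are left empty rather than guessing field names.

```python
import kfp_server_api

config = kfp_server_api.Configuration(host="http://localhost:8888")  # placeholder host
api = kfp_server_api.ArtifactServiceApi(kfp_server_api.ApiClient(config))

# Create an artifact, then record its relationship to a task. Fields for both
# request models are documented in their model docs, not in this diff.
artifact = api.create_artifact(kfp_server_api.V2beta1CreateArtifactRequest())
link = api.create_artifact_task(kfp_server_api.V2beta1CreateArtifactTaskRequest())
print(link)  # a V2beta1ArtifactTask echoing the stored relationship
```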
+ :rtype: tuple(V2beta1ArtifactTask, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'body' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method create_artifact_task" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'body' is set + if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501 + local_var_params['body'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `body` when calling `create_artifact_task`") # noqa: E501 + + collection_formats = {} + + path_params = {} + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + if 'body' in local_var_params: + body_params = local_var_params['body'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v2beta1/artifact_tasks', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='V2beta1ArtifactTask', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def get_artifact(self, artifact_id, **kwargs): # noqa: E501 + """Finds a specific Artifact by ID. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.get_artifact(artifact_id, async_req=True) + >>> result = thread.get() + + :param artifact_id: Required. The ID of the artifact to be retrieved. (required) + :type artifact_id: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: V2beta1Artifact + """ + kwargs['_return_http_data_only'] = True + return self.get_artifact_with_http_info(artifact_id, **kwargs) # noqa: E501 + + def get_artifact_with_http_info(self, artifact_id, **kwargs): # noqa: E501 + """Finds a specific Artifact by ID. # noqa: E501 + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.get_artifact_with_http_info(artifact_id, async_req=True) + >>> result = thread.get() + + :param artifact_id: Required. The ID of the artifact to be retrieved. (required) + :type artifact_id: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: tuple(V2beta1Artifact, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'artifact_id' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method get_artifact" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'artifact_id' is set + if self.api_client.client_side_validation and ('artifact_id' not in local_var_params or # noqa: E501 + local_var_params['artifact_id'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `artifact_id` when calling `get_artifact`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'artifact_id' in local_var_params: + path_params['artifact_id'] = local_var_params['artifact_id'] # noqa: E501 + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v2beta1/artifacts/{artifact_id}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='V2beta1Artifact', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_artifact_tasks(self, **kwargs): # noqa: E501 + """Lists artifact-task relationships. # noqa: E501 + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.list_artifact_tasks(async_req=True) + >>> result = thread.get() + + :param task_ids: Optional, filter artifact tasks by a set of task_ids + :type task_ids: list[str] + :param run_ids: Optional, filter artifact tasks by a set of run_ids + :type run_ids: list[str] + :param artifact_ids: Optional, filter artifact tasks by a set of artifact_ids + :type artifact_ids: list[str] + :param type: Optional. Only list artifact tasks that have artifacts of this type. - UNSPECIFIED: For validation - COMPONENT_DEFAULT_INPUT: This is used for inputs that are provided via default parameters in the component input definitions - TASK_OUTPUT_INPUT: This is used for inputs that are provided via upstream tasks. In the sdk this appears as: TaskInputsSpec.kind.task_output_parameter & TaskInputsSpec.kind.task_output_artifact - COMPONENT_INPUT: Used for inputs that are passed from parent tasks. - RUNTIME_VALUE_INPUT: Hardcoded values passed as arguments to the task. - COLLECTED_INPUTS: Used for dsl.Collected. Usage of this type indicates that all Artifacts within the IOArtifact.artifacts are inputs collected from sub tasks with ITERATOR_OUTPUT outputs. - ITERATOR_INPUT: In a for loop task, introduced via ParallelFor, this type is used to indicate whether this resolved input belongs to a parameterIterator or artifactIterator. In such a case the \"artifacts\" field for IOArtifact.artifacts is the list of resolved items for this parallelFor. - ITERATOR_INPUT_RAW: Hardcoded iterator parameters. Raw Iterator inputs have no producer. - ITERATOR_OUTPUT: When an output is produced by a Runtime Iteration Task, this value is used to differentiate it from standard inputs. - OUTPUT: All other output types fall under this type. - ONE_OF_OUTPUT: An output of a Conditions branch. + :type type: str + :param page_token: + :type page_token: str + :param page_size: + :type page_size: int + :param sort_by: + :type sort_by: str + :param filter: + :type filter: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: V2beta1ListArtifactTasksResponse + """ + kwargs['_return_http_data_only'] = True + return self.list_artifact_tasks_with_http_info(**kwargs) # noqa: E501 + + def list_artifact_tasks_with_http_info(self, **kwargs): # noqa: E501 + """Lists artifact-task relationships. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.list_artifact_tasks_with_http_info(async_req=True) + >>> result = thread.get() + + :param task_ids: Optional, filter artifact tasks by a set of task_ids + :type task_ids: list[str] + :param run_ids: Optional, filter artifact tasks by a set of run_ids + :type run_ids: list[str] + :param artifact_ids: Optional, filter artifact tasks by a set of artifact_ids + :type artifact_ids: list[str] + :param type: Optional. Only list artifact tasks that have artifacts of this type.
- UNSPECIFIED: For validation - COMPONENT_DEFAULT_INPUT: This is used for inputs that are provided via default parameters in the component input definitions - TASK_OUTPUT_INPUT: This is used for inputs that are provided via upstream tasks. In the sdk this appears as: TaskInputsSpec.kind.task_output_parameter & TaskInputsSpec.kind.task_output_artifact - COMPONENT_INPUT: Used for inputs that are passed from parent tasks. - RUNTIME_VALUE_INPUT: Hardcoded values passed as arguments to the task. - COLLECTED_INPUTS: Used for dsl.Collected. Usage of this type indicates that all Artifacts within the IOArtifact.artifacts are inputs collected from sub tasks with ITERATOR_OUTPUT outputs. - ITERATOR_INPUT: In a for loop task, introduced via ParallelFor, this type is used to indicate whether this resolved input belongs to a parameterIterator or artifactIterator. In such a case the \"artifacts\" field for IOArtifact.artifacts is the list of resolved items for this parallelFor. - ITERATOR_INPUT_RAW: Hardcoded iterator parameters. Raw Iterator inputs have no producer. - ITERATOR_OUTPUT: When an output is produced by a Runtime Iteration Task, this value is used to differentiate it from standard inputs. - OUTPUT: All other output types fall under this type. - ONE_OF_OUTPUT: An output of a Conditions branch. + :type type: str + :param page_token: + :type page_token: str + :param page_size: + :type page_size: int + :param sort_by: + :type sort_by: str + :param filter: + :type filter: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread.
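A sketch of paging through artifact-task links using only the parameters documented above (`run_ids`, `type`, `page_size`, `page_token`) and the fields of V2beta1ListArtifactTasksResponse; the host and run ID are placeholders.

```python
import kfp_server_api

config = kfp_server_api.Configuration(host="http://localhost:8888")  # placeholder host
api = kfp_server_api.ArtifactServiceApi(kfp_server_api.ApiClient(config))

page_token = ""
while True:
    resp = api.list_artifact_tasks(
        run_ids=["<run-id>"],  # placeholder run ID
        type="OUTPUT",         # one of the V2beta1IOType values described above
        page_size=50,
        page_token=page_token,
    )
    for artifact_task in resp.artifact_tasks or []:
        print(artifact_task)
    page_token = resp.next_page_token  # empty when there are no more results
    if not page_token:
        break
```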
+ :rtype: tuple(V2beta1ListArtifactTasksResponse, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'task_ids', + 'run_ids', + 'artifact_ids', + 'type', + 'page_token', + 'page_size', + 'sort_by', + 'filter' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method list_artifact_tasks" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + + collection_formats = {} + + path_params = {} + + query_params = [] + if 'task_ids' in local_var_params and local_var_params['task_ids'] is not None: # noqa: E501 + query_params.append(('task_ids', local_var_params['task_ids'])) # noqa: E501 + collection_formats['task_ids'] = 'multi' # noqa: E501 + if 'run_ids' in local_var_params and local_var_params['run_ids'] is not None: # noqa: E501 + query_params.append(('run_ids', local_var_params['run_ids'])) # noqa: E501 + collection_formats['run_ids'] = 'multi' # noqa: E501 + if 'artifact_ids' in local_var_params and local_var_params['artifact_ids'] is not None: # noqa: E501 + query_params.append(('artifact_ids', local_var_params['artifact_ids'])) # noqa: E501 + collection_formats['artifact_ids'] = 'multi' # noqa: E501 + if 'type' in local_var_params and local_var_params['type'] is not None: # noqa: E501 + query_params.append(('type', local_var_params['type'])) # noqa: E501 + if 'page_token' in local_var_params and local_var_params['page_token'] is not None: # noqa: E501 + query_params.append(('page_token', local_var_params['page_token'])) # noqa: E501 + if 'page_size' in local_var_params and local_var_params['page_size'] is not None: # noqa: E501 + query_params.append(('page_size', local_var_params['page_size'])) # noqa: E501 + if 'sort_by' in local_var_params and local_var_params['sort_by'] is not None: # noqa: E501 + query_params.append(('sort_by', local_var_params['sort_by'])) # noqa: E501 + if 'filter' in local_var_params and local_var_params['filter'] is not None: # noqa: E501 + query_params.append(('filter', local_var_params['filter'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v2beta1/artifact_tasks', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='V2beta1ListArtifactTasksResponse', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_artifacts(self, **kwargs): # noqa: E501 + """Finds all artifacts within the specified namespace. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.list_artifacts(async_req=True) + >>> result = thread.get() + + :param namespace: Optional input. 
Namespace for the artifacts. + :type namespace: str + :param page_token: A page token to request the results page. + :type page_token: str + :param page_size: The number of artifacts to be listed per page. If there are more artifacts than this number, the response message will contain a valid value in the nextPageToken field. + :type page_size: int + :param sort_by: Sorting order in form of \"field_name\", \"field_name asc\" or \"field_name desc\". Ascending by default. + :type sort_by: str + :param filter: A url-encoded, JSON-serialized filter protocol buffer (see [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/filter.proto)). + :type filter: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: V2beta1ListArtifactResponse + """ + kwargs['_return_http_data_only'] = True + return self.list_artifacts_with_http_info(**kwargs) # noqa: E501 + + def list_artifacts_with_http_info(self, **kwargs): # noqa: E501 + """Finds all artifacts within the specified namespace. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.list_artifacts_with_http_info(async_req=True) + >>> result = thread.get() + + :param namespace: Optional input. Namespace for the artifacts. + :type namespace: str + :param page_token: A page token to request the results page. + :type page_token: str + :param page_size: The number of artifacts to be listed per page. If there are more artifacts than this number, the response message will contain a valid value in the nextPageToken field. + :type page_size: int + :param sort_by: Sorting order in form of \"field_name\", \"field_name asc\" or \"field_name desc\". Ascending by default. + :type sort_by: str + :param filter: A url-encoded, JSON-serialized filter protocol buffer (see [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/filter.proto)). + :type filter: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread.
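A sketch of `list_artifacts` with the documented `namespace`, `page_size`, and `sort_by` parameters. The host and namespace are placeholders, and `created_at` is an assumed sort field (V2beta1Artifact's fields are not shown in this diff); only the documented "field_name desc" syntax is taken as given.

```python
import kfp_server_api

config = kfp_server_api.Configuration(host="http://localhost:8888")  # placeholder host
api = kfp_server_api.ArtifactServiceApi(kfp_server_api.ApiClient(config))

resp = api.list_artifacts(
    namespace="kubeflow-user",   # placeholder namespace
    page_size=20,
    sort_by="created_at desc",   # assumed field name; syntax per the docstring
)
for artifact in resp.artifacts or []:
    print(artifact)
if resp.total_size is not None:  # not always populated, per the response doc
    print(f"{resp.total_size} artifacts in total")
```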
+ :rtype: tuple(V2beta1ListArtifactResponse, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'namespace', + 'page_token', + 'page_size', + 'sort_by', + 'filter' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method list_artifacts" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + + collection_formats = {} + + path_params = {} + + query_params = [] + if 'namespace' in local_var_params and local_var_params['namespace'] is not None: # noqa: E501 + query_params.append(('namespace', local_var_params['namespace'])) # noqa: E501 + if 'page_token' in local_var_params and local_var_params['page_token'] is not None: # noqa: E501 + query_params.append(('page_token', local_var_params['page_token'])) # noqa: E501 + if 'page_size' in local_var_params and local_var_params['page_size'] is not None: # noqa: E501 + query_params.append(('page_size', local_var_params['page_size'])) # noqa: E501 + if 'sort_by' in local_var_params and local_var_params['sort_by'] is not None: # noqa: E501 + query_params.append(('sort_by', local_var_params['sort_by'])) # noqa: E501 + if 'filter' in local_var_params and local_var_params['filter'] is not None: # noqa: E501 + query_params.append(('filter', local_var_params['filter'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v2beta1/artifacts', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='V2beta1ListArtifactResponse', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/api/run_service_api.py b/backend/api/v2beta1/python_http_client/kfp_server_api/api/run_service_api.py index c1b26e5f472..308c2880eea 100644 --- a/backend/api/v2beta1/python_http_client/kfp_server_api/api/run_service_api.py +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/api/run_service_api.py @@ -36,6 +36,540 @@ def __init__(self, api_client=None): api_client = ApiClient() self.api_client = api_client + def batch_update_tasks(self, body, **kwargs): # noqa: E501 + """Updates multiple tasks in bulk. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.batch_update_tasks(body, async_req=True) + >>> result = thread.get() + + :param body: (required) + :type body: V2beta1UpdateTasksBulkRequest + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. 
+ :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: V2beta1UpdateTasksBulkResponse + """ + kwargs['_return_http_data_only'] = True + return self.batch_update_tasks_with_http_info(body, **kwargs) # noqa: E501 + + def batch_update_tasks_with_http_info(self, body, **kwargs): # noqa: E501 + """Updates multiple tasks in bulk. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.batch_update_tasks_with_http_info(body, async_req=True) + >>> result = thread.get() + + :param body: (required) + :type body: V2beta1UpdateTasksBulkRequest + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: tuple(V2beta1UpdateTasksBulkResponse, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'body' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method batch_update_tasks" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'body' is set + if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501 + local_var_params['body'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `body` when calling `batch_update_tasks`") # noqa: E501 + + collection_formats = {} + + path_params = {} + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + if 'body' in local_var_params: + body_params = local_var_params['body'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v2beta1/tasks:batchUpdate', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='V2beta1UpdateTasksBulkResponse', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), 
+ _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def create_task(self, task, **kwargs): # noqa: E501 + """Creates a new task. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.create_task(task, async_req=True) + >>> result = thread.get() + + :param task: (required) + :type task: V2beta1PipelineTaskDetail + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: V2beta1PipelineTaskDetail + """ + kwargs['_return_http_data_only'] = True + return self.create_task_with_http_info(task, **kwargs) # noqa: E501 + + def create_task_with_http_info(self, task, **kwargs): # noqa: E501 + """Creates a new task. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.create_task_with_http_info(task, async_req=True) + >>> result = thread.get() + + :param task: (required) + :type task: V2beta1PipelineTaskDetail + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. 
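
The two methods above expose the new bulk-update endpoint: batch_update_tasks is the convenience wrapper and batch_update_tasks_with_http_info additionally returns the status code and headers. A minimal call sketch follows; it assumes a server at localhost:8888, assumes these methods are generated onto RunServiceApi (they sit in this module alongside the run_service_* methods), and assumes V2beta1UpdateTasksBulkRequest carries its payload in a `tasks` field, since that model is defined outside this hunk.

    import kfp_server_api
    from kfp_server_api.models import (V2beta1PipelineTaskDetail,
                                       V2beta1UpdateTasksBulkRequest)

    config = kfp_server_api.Configuration(host="http://localhost:8888")  # assumed address
    api = kfp_server_api.RunServiceApi(kfp_server_api.ApiClient(config))  # assumed class name

    task = V2beta1PipelineTaskDetail()  # populate task_id/state/etc. before a real update
    request = V2beta1UpdateTasksBulkRequest(tasks=[task])  # 'tasks' field name is assumed
    response = api.batch_update_tasks(request)  # one POST to /apis/v2beta1/tasks:batchUpdate
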
+ :rtype: tuple(V2beta1PipelineTaskDetail, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'task' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method create_task" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'task' is set + if self.api_client.client_side_validation and ('task' not in local_var_params or # noqa: E501 + local_var_params['task'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `task` when calling `create_task`") # noqa: E501 + + collection_formats = {} + + path_params = {} + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + if 'task' in local_var_params: + body_params = local_var_params['task'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v2beta1/tasks', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='V2beta1PipelineTaskDetail', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def get_task(self, task_id, **kwargs): # noqa: E501 + """Gets a specific task by ID. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.get_task(task_id, async_req=True) + >>> result = thread.get() + + :param task_id: (required) + :type task_id: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: V2beta1PipelineTaskDetail + """ + kwargs['_return_http_data_only'] = True + return self.get_task_with_http_info(task_id, **kwargs) # noqa: E501 + + def get_task_with_http_info(self, task_id, **kwargs): # noqa: E501 + """Gets a specific task by ID. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.get_task_with_http_info(task_id, async_req=True) + >>> result = thread.get() + + :param task_id: (required) + :type task_id: str + :param async_req: Whether to execute the request asynchronously. 
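
create_task posts a V2beta1PipelineTaskDetail and returns the server-populated object; per the docstring above, passing async_req=True yields a request thread instead. A sketch reusing the `api` client constructed in the batch_update_tasks example; no fields are set on the detail object because its field definitions live outside this hunk.

    from kfp_server_api.models import V2beta1PipelineTaskDetail

    detail = V2beta1PipelineTaskDetail()        # generated models allow no-arg construction
    created = api.create_task(detail)           # synchronous POST /apis/v2beta1/tasks

    thread = api.create_task(detail, async_req=True)  # asynchronous variant
    created_async = thread.get()                # blocks until the request thread finishes
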
+ :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: tuple(V2beta1PipelineTaskDetail, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'task_id' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method get_task" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'task_id' is set + if self.api_client.client_side_validation and ('task_id' not in local_var_params or # noqa: E501 + local_var_params['task_id'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `task_id` when calling `get_task`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'task_id' in local_var_params: + path_params['task_id'] = local_var_params['task_id'] # noqa: E501 + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v2beta1/tasks/{task_id}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='V2beta1PipelineTaskDetail', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_tasks(self, **kwargs): # noqa: E501 + """Lists tasks with optional filtering. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.list_tasks(async_req=True) + >>> result = thread.get() + + :param parent_id: List all tasks with this parent task. + :type parent_id: str + :param run_id: List all tasks for this run. + :type run_id: str + :param namespace: List all tasks in this namespace. The primary use case for this filter is to detect cache hits. + :type namespace: str + :param page_size: + :type page_size: int + :param page_token: + :type page_token: str + :param filter: + :type filter: str + :param order_by: + :type order_by: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. 
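
get_task fetches a single task by ID, and the documented _preload_content=False flag returns the raw urllib3 response instead of a decoded model. A sketch with a placeholder ID and the `api` client from the earlier example:

    task = api.get_task("task-id-placeholder")  # GET /apis/v2beta1/tasks/{task_id}

    raw = api.get_task("task-id-placeholder",
                       _preload_content=False)  # undecoded urllib3.HTTPResponse
    print(raw.status, raw.data[:80])            # inspect status and raw body
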
+ :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: V2beta1ListTasksResponse + """ + kwargs['_return_http_data_only'] = True + return self.list_tasks_with_http_info(**kwargs) # noqa: E501 + + def list_tasks_with_http_info(self, **kwargs): # noqa: E501 + """Lists tasks with optional filtering. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.list_tasks_with_http_info(async_req=True) + >>> result = thread.get() + + :param parent_id: List all tasks with this parent task. + :type parent_id: str + :param run_id: List all tasks for this run. + :type run_id: str + :param namespace: List all tasks in this namespace. The primary use case for this filter is to detect cache hits. + :type namespace: str + :param page_size: + :type page_size: int + :param page_token: + :type page_token: str + :param filter: + :type filter: str + :param order_by: + :type order_by: str + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. 
+ :rtype: tuple(V2beta1ListTasksResponse, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'parent_id', + 'run_id', + 'namespace', + 'page_size', + 'page_token', + 'filter', + 'order_by' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method list_tasks" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + + collection_formats = {} + + path_params = {} + + query_params = [] + if 'parent_id' in local_var_params and local_var_params['parent_id'] is not None: # noqa: E501 + query_params.append(('parent_id', local_var_params['parent_id'])) # noqa: E501 + if 'run_id' in local_var_params and local_var_params['run_id'] is not None: # noqa: E501 + query_params.append(('run_id', local_var_params['run_id'])) # noqa: E501 + if 'namespace' in local_var_params and local_var_params['namespace'] is not None: # noqa: E501 + query_params.append(('namespace', local_var_params['namespace'])) # noqa: E501 + if 'page_size' in local_var_params and local_var_params['page_size'] is not None: # noqa: E501 + query_params.append(('page_size', local_var_params['page_size'])) # noqa: E501 + if 'page_token' in local_var_params and local_var_params['page_token'] is not None: # noqa: E501 + query_params.append(('page_token', local_var_params['page_token'])) # noqa: E501 + if 'filter' in local_var_params and local_var_params['filter'] is not None: # noqa: E501 + query_params.append(('filter', local_var_params['filter'])) # noqa: E501 + if 'order_by' in local_var_params and local_var_params['order_by'] is not None: # noqa: E501 + query_params.append(('order_by', local_var_params['order_by'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v2beta1/tasks', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='V2beta1ListTasksResponse', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + def run_service_archive_run(self, run_id, **kwargs): # noqa: E501 """Archives a run in an experiment given by run ID and experiment ID. # noqa: E501 @@ -440,6 +974,8 @@ def run_service_get_run(self, run_id, **kwargs): # noqa: E501 :type run_id: str :param experiment_id: The ID of the parent experiment. :type experiment_id: str + :param view: Optional view mode. This field can be used to adjust how detailed the Run object that is returned will be. - DEFAULT: By default `tasks` field is omitted. This provides a faster and leaner run object. - FULL: This view mode displays all the tasks for this run with all its fields populated. + :type view: str :param async_req: Whether to execute the request asynchronously. 
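
list_tasks supports the parent_id/run_id/namespace filters plus standard pagination. The response model V2beta1ListTasksResponse is defined outside this hunk, so the `tasks` and `next_page_token` attribute names below are assumptions. A paging sketch with the `api` client from above:

    all_tasks, page_token = [], None
    while True:
        resp = api.list_tasks(run_id="run-id-placeholder",
                              page_size=50,
                              page_token=page_token)
        all_tasks.extend(resp.tasks or [])     # 'tasks' field name assumed
        page_token = resp.next_page_token      # 'next_page_token' field name assumed
        if not page_token:
            break
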
:type async_req: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will @@ -470,6 +1006,8 @@ def run_service_get_run_with_http_info(self, run_id, **kwargs): # noqa: E501 :type run_id: str :param experiment_id: The ID of the parent experiment. :type experiment_id: str + :param view: Optional view mode. This field can be used to adjust how detailed the Run object that is returned will be. - DEFAULT: By default `tasks` field is omitted. This provides a faster and leaner run object. - FULL: This view mode displays all the tasks for this run with all its fields populated. + :type view: str :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _return_http_data_only: response data without head status code @@ -493,7 +1031,8 @@ def run_service_get_run_with_http_info(self, run_id, **kwargs): # noqa: E501 all_params = [ 'run_id', - 'experiment_id' + 'experiment_id', + 'view' ] all_params.extend( [ @@ -526,6 +1065,8 @@ def run_service_get_run_with_http_info(self, run_id, **kwargs): # noqa: E501 query_params = [] if 'experiment_id' in local_var_params and local_var_params['experiment_id'] is not None: # noqa: E501 query_params.append(('experiment_id', local_var_params['experiment_id'])) # noqa: E501 + if 'view' in local_var_params and local_var_params['view'] is not None: # noqa: E501 + query_params.append(('view', local_var_params['view'])) # noqa: E501 header_params = {} @@ -577,6 +1118,8 @@ def run_service_list_runs(self, **kwargs): # noqa: E501 :type sort_by: str :param filter: A url-encoded, JSON-serialized Filter protocol buffer (see [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/filter.proto)). :type filter: str + :param view: Optional view mode. This field can be used to adjust how detailed the Run object that is returned will be. - DEFAULT: By default `tasks` field is omitted. This provides a faster and leaner run object. - FULL: This view mode displays all the tasks for this run with all its fields populated. + :type view: str :param async_req: Whether to execute the request asynchronously. :type async_req: bool, optional :param _preload_content: if False, the urllib3.HTTPResponse object will @@ -615,6 +1158,8 @@ def run_service_list_runs_with_http_info(self, **kwargs): # noqa: E501 :type sort_by: str :param filter: A url-encoded, JSON-serialized Filter protocol buffer (see [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/filter.proto)). :type filter: str + :param view: Optional view mode. This field can be used to adjust how detailed the Run object that is returned will be. - DEFAULT: By default `tasks` field is omitted. This provides a faster and leaner run object. - FULL: This view mode displays all the tasks for this run with all its fields populated. + :type view: str :param async_req: Whether to execute the request asynchronously. 
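
The new `view` query parameter on get_run and list_runs trades payload size for detail: DEFAULT omits the run's `tasks` field for a leaner object, FULL populates every task. The string values below come straight from the docstrings above; the V2beta1GetRunRequestViewMode and V2beta1ListRunsRequestViewMode enum models are defined elsewhere in this PR.

    run = api.run_service_get_run("run-id-placeholder", view="FULL")  # tasks populated
    lean = api.run_service_get_run("run-id-placeholder")              # DEFAULT: no tasks
    full_page = api.run_service_list_runs(page_size=10, view="FULL")
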
:type async_req: bool, optional :param _return_http_data_only: response data without head status code @@ -642,7 +1187,8 @@ def run_service_list_runs_with_http_info(self, **kwargs): # noqa: E501 'page_token', 'page_size', 'sort_by', - 'filter' + 'filter', + 'view' ] all_params.extend( [ @@ -679,6 +1225,8 @@ def run_service_list_runs_with_http_info(self, **kwargs): # noqa: E501 query_params.append(('sort_by', local_var_params['sort_by'])) # noqa: E501 if 'filter' in local_var_params and local_var_params['filter'] is not None: # noqa: E501 query_params.append(('filter', local_var_params['filter'])) # noqa: E501 + if 'view' in local_var_params and local_var_params['view'] is not None: # noqa: E501 + query_params.append(('view', local_var_params['view'])) # noqa: E501 header_params = {} @@ -1246,3 +1794,140 @@ def run_service_unarchive_run_with_http_info(self, run_id, **kwargs): # noqa: E _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats) + + def update_task(self, task_id, task, **kwargs): # noqa: E501 + """Updates an existing task. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.update_task(task_id, task, async_req=True) + >>> result = thread.get() + + :param task_id: (required) + :type task_id: str + :param task: (required) + :type task: V2beta1PipelineTaskDetail + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. + :rtype: V2beta1PipelineTaskDetail + """ + kwargs['_return_http_data_only'] = True + return self.update_task_with_http_info(task_id, task, **kwargs) # noqa: E501 + + def update_task_with_http_info(self, task_id, task, **kwargs): # noqa: E501 + """Updates an existing task. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.update_task_with_http_info(task_id, task, async_req=True) + >>> result = thread.get() + + :param task_id: (required) + :type task_id: str + :param task: (required) + :type task: V2beta1PipelineTaskDetail + :param async_req: Whether to execute the request asynchronously. + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: Returns the result object. + If the method is called asynchronously, + returns the request thread. 
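
update_task PATCHes a task by ID with a V2beta1PipelineTaskDetail body. A read-modify-write sketch; the `state` attribute name is an assumption (the detail model is outside this hunk), though its allowed values appear later in this diff as PipelineTaskDetailTaskState.

    task_id = "task-id-placeholder"
    task = api.get_task(task_id)
    task.state = "FAILED"                     # assumed field; value per PipelineTaskDetailTaskState
    updated = api.update_task(task_id, task)  # PATCH /apis/v2beta1/tasks/{task_id}
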
+ :rtype: tuple(V2beta1PipelineTaskDetail, status_code(int), headers(HTTPHeaderDict)) + """ + + local_var_params = locals() + + all_params = [ + 'task_id', + 'task' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method update_task" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'task_id' is set + if self.api_client.client_side_validation and ('task_id' not in local_var_params or # noqa: E501 + local_var_params['task_id'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `task_id` when calling `update_task`") # noqa: E501 + # verify the required parameter 'task' is set + if self.api_client.client_side_validation and ('task' not in local_var_params or # noqa: E501 + local_var_params['task'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `task` when calling `update_task`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'task_id' in local_var_params: + path_params['task_id'] = local_var_params['task_id'] # noqa: E501 + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + if 'task' in local_var_params: + body_params = local_var_params['task'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['Bearer'] # noqa: E501 + + return self.api_client.call_api( + '/apis/v2beta1/tasks/{task_id}', 'PATCH', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='V2beta1PipelineTaskDetail', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/__init__.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/__init__.py index 298b31c0029..9f0ee7ad31b 100644 --- a/backend/api/v2beta1/python_http_client/kfp_server_api/models/__init__.py +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/__init__.py @@ -14,32 +14,56 @@ from __future__ import absolute_import # import models into model package +from kfp_server_api.models.artifact_artifact_type import ArtifactArtifactType from kfp_server_api.models.authorize_request_resources import AuthorizeRequestResources from kfp_server_api.models.authorize_request_verb import AuthorizeRequestVerb from kfp_server_api.models.googlerpc_status import GooglerpcStatus +from kfp_server_api.models.input_outputs_io_artifact import InputOutputsIOArtifact +from kfp_server_api.models.input_outputs_io_parameter import InputOutputsIOParameter from kfp_server_api.models.pipeline_task_detail_child_task import PipelineTaskDetailChildTask +from kfp_server_api.models.pipeline_task_detail_input_outputs import 
PipelineTaskDetailInputOutputs +from kfp_server_api.models.pipeline_task_detail_status_metadata import PipelineTaskDetailStatusMetadata +from kfp_server_api.models.pipeline_task_detail_task_pod import PipelineTaskDetailTaskPod +from kfp_server_api.models.pipeline_task_detail_task_pod_type import PipelineTaskDetailTaskPodType +from kfp_server_api.models.pipeline_task_detail_task_state import PipelineTaskDetailTaskState +from kfp_server_api.models.pipeline_task_detail_task_status import PipelineTaskDetailTaskStatus +from kfp_server_api.models.pipeline_task_detail_task_type import PipelineTaskDetailTaskType +from kfp_server_api.models.pipeline_task_detail_type_attributes import PipelineTaskDetailTypeAttributes from kfp_server_api.models.predicate_int_values import PredicateIntValues from kfp_server_api.models.predicate_long_values import PredicateLongValues from kfp_server_api.models.predicate_string_values import PredicateStringValues from kfp_server_api.models.protobuf_any import ProtobufAny from kfp_server_api.models.protobuf_null_value import ProtobufNullValue from kfp_server_api.models.recurring_run_mode import RecurringRunMode -from kfp_server_api.models.v2beta1_artifact_list import V2beta1ArtifactList +from kfp_server_api.models.v2beta1_artifact import V2beta1Artifact +from kfp_server_api.models.v2beta1_artifact_task import V2beta1ArtifactTask +from kfp_server_api.models.v2beta1_create_artifact_request import V2beta1CreateArtifactRequest +from kfp_server_api.models.v2beta1_create_artifact_task_request import V2beta1CreateArtifactTaskRequest +from kfp_server_api.models.v2beta1_create_artifact_tasks_bulk_request import V2beta1CreateArtifactTasksBulkRequest +from kfp_server_api.models.v2beta1_create_artifact_tasks_bulk_response import V2beta1CreateArtifactTasksBulkResponse +from kfp_server_api.models.v2beta1_create_artifacts_bulk_request import V2beta1CreateArtifactsBulkRequest +from kfp_server_api.models.v2beta1_create_artifacts_bulk_response import V2beta1CreateArtifactsBulkResponse from kfp_server_api.models.v2beta1_create_pipeline_and_version_request import V2beta1CreatePipelineAndVersionRequest from kfp_server_api.models.v2beta1_cron_schedule import V2beta1CronSchedule from kfp_server_api.models.v2beta1_experiment import V2beta1Experiment from kfp_server_api.models.v2beta1_experiment_storage_state import V2beta1ExperimentStorageState from kfp_server_api.models.v2beta1_filter import V2beta1Filter from kfp_server_api.models.v2beta1_get_healthz_response import V2beta1GetHealthzResponse +from kfp_server_api.models.v2beta1_get_run_request_view_mode import V2beta1GetRunRequestViewMode +from kfp_server_api.models.v2beta1_io_producer import V2beta1IOProducer +from kfp_server_api.models.v2beta1_io_type import V2beta1IOType +from kfp_server_api.models.v2beta1_list_artifact_response import V2beta1ListArtifactResponse +from kfp_server_api.models.v2beta1_list_artifact_tasks_response import V2beta1ListArtifactTasksResponse from kfp_server_api.models.v2beta1_list_experiments_response import V2beta1ListExperimentsResponse from kfp_server_api.models.v2beta1_list_pipeline_versions_response import V2beta1ListPipelineVersionsResponse from kfp_server_api.models.v2beta1_list_pipelines_response import V2beta1ListPipelinesResponse from kfp_server_api.models.v2beta1_list_recurring_runs_response import V2beta1ListRecurringRunsResponse +from kfp_server_api.models.v2beta1_list_runs_request_view_mode import V2beta1ListRunsRequestViewMode from kfp_server_api.models.v2beta1_list_runs_response import 
V2beta1ListRunsResponse +from kfp_server_api.models.v2beta1_list_tasks_response import V2beta1ListTasksResponse from kfp_server_api.models.v2beta1_periodic_schedule import V2beta1PeriodicSchedule from kfp_server_api.models.v2beta1_pipeline import V2beta1Pipeline from kfp_server_api.models.v2beta1_pipeline_task_detail import V2beta1PipelineTaskDetail -from kfp_server_api.models.v2beta1_pipeline_task_executor_detail import V2beta1PipelineTaskExecutorDetail from kfp_server_api.models.v2beta1_pipeline_version import V2beta1PipelineVersion from kfp_server_api.models.v2beta1_pipeline_version_reference import V2beta1PipelineVersionReference from kfp_server_api.models.v2beta1_predicate import V2beta1Predicate @@ -54,6 +78,8 @@ from kfp_server_api.models.v2beta1_runtime_state import V2beta1RuntimeState from kfp_server_api.models.v2beta1_runtime_status import V2beta1RuntimeStatus from kfp_server_api.models.v2beta1_trigger import V2beta1Trigger +from kfp_server_api.models.v2beta1_update_tasks_bulk_request import V2beta1UpdateTasksBulkRequest +from kfp_server_api.models.v2beta1_update_tasks_bulk_response import V2beta1UpdateTasksBulkResponse from kfp_server_api.models.v2beta1_url import V2beta1Url from kfp_server_api.models.v2beta1_visualization import V2beta1Visualization from kfp_server_api.models.v2beta1_visualization_type import V2beta1VisualizationType diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/artifact_artifact_type.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/artifact_artifact_type.py new file mode 100644 index 00000000000..94a837f2532 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/artifact_artifact_type.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class ArtifactArtifactType(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + allowed enum values + """ + TYPE_UNSPECIFIED = "TYPE_UNSPECIFIED" + ARTIFACT = "Artifact" + MODEL = "Model" + DATASET = "Dataset" + HTML = "HTML" + MARKDOWN = "Markdown" + METRIC = "Metric" + CLASSIFICATIONMETRIC = "ClassificationMetric" + SLICEDCLASSIFICATIONMETRIC = "SlicedClassificationMetric" + + allowable_values = [TYPE_UNSPECIFIED, ARTIFACT, MODEL, DATASET, HTML, MARKDOWN, METRIC, CLASSIFICATIONMETRIC, SLICEDCLASSIFICATIONMETRIC] # noqa: E501 + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + } + + attribute_map = { + } + + def __init__(self, local_vars_configuration=None): # noqa: E501 + """ArtifactArtifactType - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + self.discriminator = None + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, ArtifactArtifactType): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, ArtifactArtifactType): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/input_outputs_io_artifact.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/input_outputs_io_artifact.py new file mode 100644 index 00000000000..33de99728a0 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/input_outputs_io_artifact.py @@ -0,0 +1,198 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class InputOutputsIOArtifact(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'artifacts': 'list[V2beta1Artifact]', + 'type': 'V2beta1IOType', + 'artifact_key': 'str', + 'producer': 'V2beta1IOProducer' + } + + attribute_map = { + 'artifacts': 'artifacts', + 'type': 'type', + 'artifact_key': 'artifact_key', + 'producer': 'producer' + } + + def __init__(self, artifacts=None, type=None, artifact_key=None, producer=None, local_vars_configuration=None): # noqa: E501 + """InputOutputsIOArtifact - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._artifacts = None + self._type = None + self._artifact_key = None + self._producer = None + self.discriminator = None + + if artifacts is not None: + self.artifacts = artifacts + if type is not None: + self.type = type + if artifact_key is not None: + self.artifact_key = artifact_key + if producer is not None: + self.producer = producer + + @property + def artifacts(self): + """Gets the artifacts of this InputOutputsIOArtifact. # noqa: E501 + + + :return: The artifacts of this InputOutputsIOArtifact. # noqa: E501 + :rtype: list[V2beta1Artifact] + """ + return self._artifacts + + @artifacts.setter + def artifacts(self, artifacts): + """Sets the artifacts of this InputOutputsIOArtifact. + + + :param artifacts: The artifacts of this InputOutputsIOArtifact. # noqa: E501 + :type artifacts: list[V2beta1Artifact] + """ + + self._artifacts = artifacts + + @property + def type(self): + """Gets the type of this InputOutputsIOArtifact. # noqa: E501 + + + :return: The type of this InputOutputsIOArtifact. # noqa: E501 + :rtype: V2beta1IOType + """ + return self._type + + @type.setter + def type(self, type): + """Sets the type of this InputOutputsIOArtifact. + + + :param type: The type of this InputOutputsIOArtifact. # noqa: E501 + :type type: V2beta1IOType + """ + + self._type = type + + @property + def artifact_key(self): + """Gets the artifact_key of this InputOutputsIOArtifact. # noqa: E501 + + + :return: The artifact_key of this InputOutputsIOArtifact. # noqa: E501 + :rtype: str + """ + return self._artifact_key + + @artifact_key.setter + def artifact_key(self, artifact_key): + """Sets the artifact_key of this InputOutputsIOArtifact. + + + :param artifact_key: The artifact_key of this InputOutputsIOArtifact. # noqa: E501 + :type artifact_key: str + """ + + self._artifact_key = artifact_key + + @property + def producer(self): + """Gets the producer of this InputOutputsIOArtifact. # noqa: E501 + + + :return: The producer of this InputOutputsIOArtifact. # noqa: E501 + :rtype: V2beta1IOProducer + """ + return self._producer + + @producer.setter + def producer(self, producer): + """Sets the producer of this InputOutputsIOArtifact. + + + :param producer: The producer of this InputOutputsIOArtifact. 
# noqa: E501 + :type producer: V2beta1IOProducer + """ + + self._producer = producer + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, InputOutputsIOArtifact): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, InputOutputsIOArtifact): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/input_outputs_io_parameter.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/input_outputs_io_parameter.py new file mode 100644 index 00000000000..575be77b381 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/input_outputs_io_parameter.py @@ -0,0 +1,198 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class InputOutputsIOParameter(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'value': 'object', + 'type': 'V2beta1IOType', + 'parameter_key': 'str', + 'producer': 'V2beta1IOProducer' + } + + attribute_map = { + 'value': 'value', + 'type': 'type', + 'parameter_key': 'parameter_key', + 'producer': 'producer' + } + + def __init__(self, value=None, type=None, parameter_key=None, producer=None, local_vars_configuration=None): # noqa: E501 + """InputOutputsIOParameter - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._value = None + self._type = None + self._parameter_key = None + self._producer = None + self.discriminator = None + + if value is not None: + self.value = value + if type is not None: + self.type = type + if parameter_key is not None: + self.parameter_key = parameter_key + if producer is not None: + self.producer = producer + + @property + def value(self): + """Gets the value of this InputOutputsIOParameter. # noqa: E501 + + + :return: The value of this InputOutputsIOParameter. 
# noqa: E501 + :rtype: object + """ + return self._value + + @value.setter + def value(self, value): + """Sets the value of this InputOutputsIOParameter. + + + :param value: The value of this InputOutputsIOParameter. # noqa: E501 + :type value: object + """ + + self._value = value + + @property + def type(self): + """Gets the type of this InputOutputsIOParameter. # noqa: E501 + + + :return: The type of this InputOutputsIOParameter. # noqa: E501 + :rtype: V2beta1IOType + """ + return self._type + + @type.setter + def type(self, type): + """Sets the type of this InputOutputsIOParameter. + + + :param type: The type of this InputOutputsIOParameter. # noqa: E501 + :type type: V2beta1IOType + """ + + self._type = type + + @property + def parameter_key(self): + """Gets the parameter_key of this InputOutputsIOParameter. # noqa: E501 + + + :return: The parameter_key of this InputOutputsIOParameter. # noqa: E501 + :rtype: str + """ + return self._parameter_key + + @parameter_key.setter + def parameter_key(self, parameter_key): + """Sets the parameter_key of this InputOutputsIOParameter. + + + :param parameter_key: The parameter_key of this InputOutputsIOParameter. # noqa: E501 + :type parameter_key: str + """ + + self._parameter_key = parameter_key + + @property + def producer(self): + """Gets the producer of this InputOutputsIOParameter. # noqa: E501 + + + :return: The producer of this InputOutputsIOParameter. # noqa: E501 + :rtype: V2beta1IOProducer + """ + return self._producer + + @producer.setter + def producer(self, producer): + """Sets the producer of this InputOutputsIOParameter. + + + :param producer: The producer of this InputOutputsIOParameter. # noqa: E501 + :type producer: V2beta1IOProducer + """ + + self._producer = producer + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, InputOutputsIOParameter): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, InputOutputsIOParameter): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/pipeline_task_detail_child_task.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/pipeline_task_detail_child_task.py index 099cd4a5b32..12a53cf9f4f 100644 --- a/backend/api/v2beta1/python_http_client/kfp_server_api/models/pipeline_task_detail_child_task.py +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/pipeline_task_detail_child_task.py @@ -34,28 +34,28 @@ class PipelineTaskDetailChildTask(object): """ openapi_types = { 'task_id': 'str', - 'pod_name': 'str' + 'name': 'str' } attribute_map = { 'task_id': 'task_id', - 
'pod_name': 'pod_name' + 'name': 'name' } - def __init__(self, task_id=None, pod_name=None, local_vars_configuration=None): # noqa: E501 + def __init__(self, task_id=None, name=None, local_vars_configuration=None): # noqa: E501 """PipelineTaskDetailChildTask - a model defined in OpenAPI""" # noqa: E501 if local_vars_configuration is None: local_vars_configuration = Configuration() self.local_vars_configuration = local_vars_configuration self._task_id = None - self._pod_name = None + self._name = None self.discriminator = None if task_id is not None: self.task_id = task_id - if pod_name is not None: - self.pod_name = pod_name + if name is not None: + self.name = name @property def task_id(self): @@ -81,27 +81,25 @@ def task_id(self, task_id): self._task_id = task_id @property - def pod_name(self): - """Gets the pod_name of this PipelineTaskDetailChildTask. # noqa: E501 + def name(self): + """Gets the name of this PipelineTaskDetailChildTask. # noqa: E501 - Name of the corresponding pod assigned by the orchestration engine. Also known as node_id. # noqa: E501 - :return: The pod_name of this PipelineTaskDetailChildTask. # noqa: E501 + :return: The name of this PipelineTaskDetailChildTask. # noqa: E501 :rtype: str """ - return self._pod_name + return self._name - @pod_name.setter - def pod_name(self, pod_name): - """Sets the pod_name of this PipelineTaskDetailChildTask. + @name.setter + def name(self, name): + """Sets the name of this PipelineTaskDetailChildTask. - Name of the corresponding pod assigned by the orchestration engine. Also known as node_id. # noqa: E501 - :param pod_name: The pod_name of this PipelineTaskDetailChildTask. # noqa: E501 - :type pod_name: str + :param name: The name of this PipelineTaskDetailChildTask. # noqa: E501 + :type name: str """ - self._pod_name = pod_name + self._name = name def to_dict(self): """Returns the model properties as a dict""" diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/pipeline_task_detail_input_outputs.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/pipeline_task_detail_input_outputs.py new file mode 100644 index 00000000000..a72556826c8 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/pipeline_task_detail_input_outputs.py @@ -0,0 +1,148 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class PipelineTaskDetailInputOutputs(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
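
The hunk above renames PipelineTaskDetailChildTask.pod_name to name, a breaking change for any caller that read pod_name; pod details now live on the new PipelineTaskDetailTaskPod model instead. A migration sketch using only the constructor shown above:

    from kfp_server_api.models import PipelineTaskDetailChildTask

    child = PipelineTaskDetailChildTask(task_id="child-task-id",
                                        name="child-task-name")
    # before this change: child.pod_name; after: child.name
    print(child.name)
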
+ """ + openapi_types = { + 'parameters': 'list[InputOutputsIOParameter]', + 'artifacts': 'list[InputOutputsIOArtifact]' + } + + attribute_map = { + 'parameters': 'parameters', + 'artifacts': 'artifacts' + } + + def __init__(self, parameters=None, artifacts=None, local_vars_configuration=None): # noqa: E501 + """PipelineTaskDetailInputOutputs - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._parameters = None + self._artifacts = None + self.discriminator = None + + if parameters is not None: + self.parameters = parameters + if artifacts is not None: + self.artifacts = artifacts + + @property + def parameters(self): + """Gets the parameters of this PipelineTaskDetailInputOutputs. # noqa: E501 + + + :return: The parameters of this PipelineTaskDetailInputOutputs. # noqa: E501 + :rtype: list[InputOutputsIOParameter] + """ + return self._parameters + + @parameters.setter + def parameters(self, parameters): + """Sets the parameters of this PipelineTaskDetailInputOutputs. + + + :param parameters: The parameters of this PipelineTaskDetailInputOutputs. # noqa: E501 + :type parameters: list[InputOutputsIOParameter] + """ + + self._parameters = parameters + + @property + def artifacts(self): + """Gets the artifacts of this PipelineTaskDetailInputOutputs. # noqa: E501 + + Output Only. To create Artifacts for a task use ArtifactTasks to link artifacts to tasks. # noqa: E501 + + :return: The artifacts of this PipelineTaskDetailInputOutputs. # noqa: E501 + :rtype: list[InputOutputsIOArtifact] + """ + return self._artifacts + + @artifacts.setter + def artifacts(self, artifacts): + """Sets the artifacts of this PipelineTaskDetailInputOutputs. + + Output Only. To create Artifacts for a task use ArtifactTasks to link artifacts to tasks. # noqa: E501 + + :param artifacts: The artifacts of this PipelineTaskDetailInputOutputs. 
# noqa: E501 + :type artifacts: list[InputOutputsIOArtifact] + """ + + self._artifacts = artifacts + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, PipelineTaskDetailInputOutputs): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, PipelineTaskDetailInputOutputs): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/pipeline_task_detail_status_metadata.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/pipeline_task_detail_status_metadata.py new file mode 100644 index 00000000000..04f77dd0917 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/pipeline_task_detail_status_metadata.py @@ -0,0 +1,150 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class PipelineTaskDetailStatusMetadata(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'message': 'str', + 'custom_properties': 'dict(str, object)' + } + + attribute_map = { + 'message': 'message', + 'custom_properties': 'custom_properties' + } + + def __init__(self, message=None, custom_properties=None, local_vars_configuration=None): # noqa: E501 + """PipelineTaskDetailStatusMetadata - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._message = None + self._custom_properties = None + self.discriminator = None + + if message is not None: + self.message = message + if custom_properties is not None: + self.custom_properties = custom_properties + + @property + def message(self): + """Gets the message of this PipelineTaskDetailStatusMetadata. # noqa: E501 + + KFP Backend will populate this field with error messages if any are available on a Failed task. # noqa: E501 + + :return: The message of this PipelineTaskDetailStatusMetadata. 
# noqa: E501 + :rtype: str + """ + return self._message + + @message.setter + def message(self, message): + """Sets the message of this PipelineTaskDetailStatusMetadata. + + KFP Backend will populate this field with error messages if any are available on a Failed task. # noqa: E501 + + :param message: The message of this PipelineTaskDetailStatusMetadata. # noqa: E501 + :type message: str + """ + + self._message = message + + @property + def custom_properties(self): + """Gets the custom_properties of this PipelineTaskDetailStatusMetadata. # noqa: E501 + + Custom status metadata, this can be used to provide additional status info for a given task during runtime This is currently not utilized by KFP backend. # noqa: E501 + + :return: The custom_properties of this PipelineTaskDetailStatusMetadata. # noqa: E501 + :rtype: dict(str, object) + """ + return self._custom_properties + + @custom_properties.setter + def custom_properties(self, custom_properties): + """Sets the custom_properties of this PipelineTaskDetailStatusMetadata. + + Custom status metadata, this can be used to provide additional status info for a given task during runtime This is currently not utilized by KFP backend. # noqa: E501 + + :param custom_properties: The custom_properties of this PipelineTaskDetailStatusMetadata. # noqa: E501 + :type custom_properties: dict(str, object) + """ + + self._custom_properties = custom_properties + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, PipelineTaskDetailStatusMetadata): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, PipelineTaskDetailStatusMetadata): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/pipeline_task_detail_task_pod.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/pipeline_task_detail_task_pod.py new file mode 100644 index 00000000000..279e3caadb1 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/pipeline_task_detail_task_pod.py @@ -0,0 +1,172 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class PipelineTaskDetailTaskPod(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
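
PipelineTaskDetailStatusMetadata carries a backend-populated error `message` for failed tasks plus a free-form `custom_properties` map that, per the docstrings, the KFP backend does not yet consume. Both constructor arguments are shown above; a sketch:

    from kfp_server_api.models import PipelineTaskDetailStatusMetadata

    meta = PipelineTaskDetailStatusMetadata(
        message="executor exited with code 137",        # backend fills this on failures
        custom_properties={"retries": 2, "oom": True},  # free-form runtime annotations
    )
    print(meta.to_dict())
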
+ """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'name': 'str', + 'uid': 'str', + 'type': 'PipelineTaskDetailTaskPodType' + } + + attribute_map = { + 'name': 'name', + 'uid': 'uid', + 'type': 'type' + } + + def __init__(self, name=None, uid=None, type=None, local_vars_configuration=None): # noqa: E501 + """PipelineTaskDetailTaskPod - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._name = None + self._uid = None + self._type = None + self.discriminator = None + + if name is not None: + self.name = name + if uid is not None: + self.uid = uid + if type is not None: + self.type = type + + @property + def name(self): + """Gets the name of this PipelineTaskDetailTaskPod. # noqa: E501 + + + :return: The name of this PipelineTaskDetailTaskPod. # noqa: E501 + :rtype: str + """ + return self._name + + @name.setter + def name(self, name): + """Sets the name of this PipelineTaskDetailTaskPod. + + + :param name: The name of this PipelineTaskDetailTaskPod. # noqa: E501 + :type name: str + """ + + self._name = name + + @property + def uid(self): + """Gets the uid of this PipelineTaskDetailTaskPod. # noqa: E501 + + + :return: The uid of this PipelineTaskDetailTaskPod. # noqa: E501 + :rtype: str + """ + return self._uid + + @uid.setter + def uid(self, uid): + """Sets the uid of this PipelineTaskDetailTaskPod. + + + :param uid: The uid of this PipelineTaskDetailTaskPod. # noqa: E501 + :type uid: str + """ + + self._uid = uid + + @property + def type(self): + """Gets the type of this PipelineTaskDetailTaskPod. # noqa: E501 + + + :return: The type of this PipelineTaskDetailTaskPod. # noqa: E501 + :rtype: PipelineTaskDetailTaskPodType + """ + return self._type + + @type.setter + def type(self, type): + """Sets the type of this PipelineTaskDetailTaskPod. + + + :param type: The type of this PipelineTaskDetailTaskPod. 
# noqa: E501 + :type type: PipelineTaskDetailTaskPodType + """ + + self._type = type + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, PipelineTaskDetailTaskPod): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, PipelineTaskDetailTaskPod): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/pipeline_task_detail_task_pod_type.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/pipeline_task_detail_task_pod_type.py new file mode 100644 index 00000000000..8324f070915 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/pipeline_task_detail_task_pod_type.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class PipelineTaskDetailTaskPodType(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + allowed enum values + """ + UNSPECIFIED = "UNSPECIFIED" + DRIVER = "DRIVER" + EXECUTOR = "EXECUTOR" + + allowable_values = [UNSPECIFIED, DRIVER, EXECUTOR] # noqa: E501 + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
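# A quick sketch of the two pod models just added: PipelineTaskDetailTaskPod
# holds the pod identity, and PipelineTaskDetailTaskPodType is a plain class
# of string constants (UNSPECIFIED / DRIVER / EXECUTOR). The name and uid
# below are illustrative.
from kfp_server_api.models.pipeline_task_detail_task_pod import PipelineTaskDetailTaskPod
from kfp_server_api.models.pipeline_task_detail_task_pod_type import PipelineTaskDetailTaskPodType

pod = PipelineTaskDetailTaskPod(
    name="train-op-pod",                          # illustrative pod name
    uid="9b2d6c1e-0f3a-4e7b-8c5d-1a2b3c4d5e6f",   # illustrative pod UID
    type=PipelineTaskDetailTaskPodType.EXECUTOR,  # serializes as the plain string "EXECUTOR"
)
assert pod.to_dict()["type"] == "EXECUTOR"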
+ """ + openapi_types = { + } + + attribute_map = { + } + + def __init__(self, local_vars_configuration=None): # noqa: E501 + """PipelineTaskDetailTaskPodType - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + self.discriminator = None + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, PipelineTaskDetailTaskPodType): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, PipelineTaskDetailTaskPodType): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/pipeline_task_detail_task_state.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/pipeline_task_detail_task_state.py new file mode 100644 index 00000000000..385db2f46fe --- /dev/null +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/pipeline_task_detail_task_state.py @@ -0,0 +1,104 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class PipelineTaskDetailTaskState(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + allowed enum values + """ + RUNTIME_STATE_UNSPECIFIED = "RUNTIME_STATE_UNSPECIFIED" + RUNNING = "RUNNING" + SUCCEEDED = "SUCCEEDED" + SKIPPED = "SKIPPED" + FAILED = "FAILED" + CACHED = "CACHED" + + allowable_values = [RUNTIME_STATE_UNSPECIFIED, RUNNING, SUCCEEDED, SKIPPED, FAILED, CACHED] # noqa: E501 + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + } + + attribute_map = { + } + + def __init__(self, local_vars_configuration=None): # noqa: E501 + """PipelineTaskDetailTaskState - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + self.discriminator = None + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, PipelineTaskDetailTaskState): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, PipelineTaskDetailTaskState): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/pipeline_task_detail_task_status.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/pipeline_task_detail_task_status.py new file mode 100644 index 00000000000..f217c8ca5c3 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/pipeline_task_detail_task_status.py @@ -0,0 +1,172 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class PipelineTaskDetailTaskStatus(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'update_time': 'datetime', + 'state': 'PipelineTaskDetailTaskState', + 'error': 'GooglerpcStatus' + } + + attribute_map = { + 'update_time': 'update_time', + 'state': 'state', + 'error': 'error' + } + + def __init__(self, update_time=None, state=None, error=None, local_vars_configuration=None): # noqa: E501 + """PipelineTaskDetailTaskStatus - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._update_time = None + self._state = None + self._error = None + self.discriminator = None + + if update_time is not None: + self.update_time = update_time + if state is not None: + self.state = state + if error is not None: + self.error = error + + @property + def update_time(self): + """Gets the update_time of this PipelineTaskDetailTaskStatus. # noqa: E501 + + + :return: The update_time of this PipelineTaskDetailTaskStatus. # noqa: E501 + :rtype: datetime + """ + return self._update_time + + @update_time.setter + def update_time(self, update_time): + """Sets the update_time of this PipelineTaskDetailTaskStatus. + + + :param update_time: The update_time of this PipelineTaskDetailTaskStatus. # noqa: E501 + :type update_time: datetime + """ + + self._update_time = update_time + + @property + def state(self): + """Gets the state of this PipelineTaskDetailTaskStatus. # noqa: E501 + + + :return: The state of this PipelineTaskDetailTaskStatus. # noqa: E501 + :rtype: PipelineTaskDetailTaskState + """ + return self._state + + @state.setter + def state(self, state): + """Sets the state of this PipelineTaskDetailTaskStatus. + + + :param state: The state of this PipelineTaskDetailTaskStatus. # noqa: E501 + :type state: PipelineTaskDetailTaskState + """ + + self._state = state + + @property + def error(self): + """Gets the error of this PipelineTaskDetailTaskStatus. # noqa: E501 + + + :return: The error of this PipelineTaskDetailTaskStatus. # noqa: E501 + :rtype: GooglerpcStatus + """ + return self._error + + @error.setter + def error(self, error): + """Sets the error of this PipelineTaskDetailTaskStatus. + + + :param error: The error of this PipelineTaskDetailTaskStatus. 
# noqa: E501 + :type error: GooglerpcStatus + """ + + self._error = error + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, PipelineTaskDetailTaskStatus): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, PipelineTaskDetailTaskStatus): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/pipeline_task_detail_task_type.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/pipeline_task_detail_task_type.py new file mode 100644 index 00000000000..52a2ded3e8d --- /dev/null +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/pipeline_task_detail_task_type.py @@ -0,0 +1,106 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class PipelineTaskDetailTaskType(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + allowed enum values + """ + ROOT = "ROOT" + RUNTIME = "RUNTIME" + CONDITION_BRANCH = "CONDITION_BRANCH" + CONDITION = "CONDITION" + LOOP = "LOOP" + EXIT_HANDLER = "EXIT_HANDLER" + IMPORTER = "IMPORTER" + DAG = "DAG" + + allowable_values = [ROOT, RUNTIME, CONDITION_BRANCH, CONDITION, LOOP, EXIT_HANDLER, IMPORTER, DAG] # noqa: E501 + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
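# A minimal sketch combining the PipelineTaskDetailTaskStatus model above
# with the state enum. The error field (a GooglerpcStatus, defined elsewhere
# in the generated client) is left unset; values are illustrative.
from datetime import datetime, timezone

from kfp_server_api.models.pipeline_task_detail_task_state import PipelineTaskDetailTaskState
from kfp_server_api.models.pipeline_task_detail_task_status import PipelineTaskDetailTaskStatus

status = PipelineTaskDetailTaskStatus(
    update_time=datetime.now(timezone.utc),
    state=PipelineTaskDetailTaskState.RUNNING,
)
print(status.to_dict())  # {'update_time': datetime(...), 'state': 'RUNNING', 'error': None}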
+ """ + openapi_types = { + } + + attribute_map = { + } + + def __init__(self, local_vars_configuration=None): # noqa: E501 + """PipelineTaskDetailTaskType - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + self.discriminator = None + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, PipelineTaskDetailTaskType): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, PipelineTaskDetailTaskType): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/pipeline_task_detail_type_attributes.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/pipeline_task_detail_type_attributes.py new file mode 100644 index 00000000000..1bcda9763d3 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/pipeline_task_detail_type_attributes.py @@ -0,0 +1,146 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class PipelineTaskDetailTypeAttributes(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'iteration_index': 'str', + 'iteration_count': 'str' + } + + attribute_map = { + 'iteration_index': 'iteration_index', + 'iteration_count': 'iteration_count' + } + + def __init__(self, iteration_index=None, iteration_count=None, local_vars_configuration=None): # noqa: E501 + """PipelineTaskDetailTypeAttributes - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._iteration_index = None + self._iteration_count = None + self.discriminator = None + + if iteration_index is not None: + self.iteration_index = iteration_index + if iteration_count is not None: + self.iteration_count = iteration_count + + @property + def iteration_index(self): + """Gets the iteration_index of this PipelineTaskDetailTypeAttributes. # noqa: E501 + + + :return: The iteration_index of this PipelineTaskDetailTypeAttributes. # noqa: E501 + :rtype: str + """ + return self._iteration_index + + @iteration_index.setter + def iteration_index(self, iteration_index): + """Sets the iteration_index of this PipelineTaskDetailTypeAttributes. + + + :param iteration_index: The iteration_index of this PipelineTaskDetailTypeAttributes. # noqa: E501 + :type iteration_index: str + """ + + self._iteration_index = iteration_index + + @property + def iteration_count(self): + """Gets the iteration_count of this PipelineTaskDetailTypeAttributes. # noqa: E501 + + + :return: The iteration_count of this PipelineTaskDetailTypeAttributes. # noqa: E501 + :rtype: str + """ + return self._iteration_count + + @iteration_count.setter + def iteration_count(self, iteration_count): + """Sets the iteration_count of this PipelineTaskDetailTypeAttributes. + + + :param iteration_count: The iteration_count of this PipelineTaskDetailTypeAttributes. 
# noqa: E501 + :type iteration_count: str + """ + + self._iteration_count = iteration_count + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, PipelineTaskDetailTypeAttributes): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, PipelineTaskDetailTypeAttributes): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_artifact.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_artifact.py new file mode 100644 index 00000000000..1baab0439e2 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_artifact.py @@ -0,0 +1,336 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class V2beta1Artifact(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
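# Sketch for the loop-iteration attributes just defined. Note that both
# fields are declared as 'str' in openapi_types (presumably because 64-bit
# integers are rendered as strings in the swagger definition), so callers
# pass strings rather than ints. Values are illustrative.
from kfp_server_api.models.pipeline_task_detail_type_attributes import PipelineTaskDetailTypeAttributes

attrs = PipelineTaskDetailTypeAttributes(
    iteration_index="2",   # this task is the third iteration (zero-based index)
    iteration_count="5",   # of a five-iteration loop
)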
+ """ + openapi_types = { + 'artifact_id': 'str', + 'name': 'str', + 'description': 'str', + 'type': 'ArtifactArtifactType', + 'uri': 'str', + 'metadata': 'dict(str, object)', + 'number_value': 'float', + 'created_at': 'datetime', + 'namespace': 'str' + } + + attribute_map = { + 'artifact_id': 'artifact_id', + 'name': 'name', + 'description': 'description', + 'type': 'type', + 'uri': 'uri', + 'metadata': 'metadata', + 'number_value': 'number_value', + 'created_at': 'created_at', + 'namespace': 'namespace' + } + + def __init__(self, artifact_id=None, name=None, description=None, type=None, uri=None, metadata=None, number_value=None, created_at=None, namespace=None, local_vars_configuration=None): # noqa: E501 + """V2beta1Artifact - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._artifact_id = None + self._name = None + self._description = None + self._type = None + self._uri = None + self._metadata = None + self._number_value = None + self._created_at = None + self._namespace = None + self.discriminator = None + + if artifact_id is not None: + self.artifact_id = artifact_id + if name is not None: + self.name = name + if description is not None: + self.description = description + if type is not None: + self.type = type + if uri is not None: + self.uri = uri + if metadata is not None: + self.metadata = metadata + if number_value is not None: + self.number_value = number_value + if created_at is not None: + self.created_at = created_at + if namespace is not None: + self.namespace = namespace + + @property + def artifact_id(self): + """Gets the artifact_id of this V2beta1Artifact. # noqa: E501 + + + :return: The artifact_id of this V2beta1Artifact. # noqa: E501 + :rtype: str + """ + return self._artifact_id + + @artifact_id.setter + def artifact_id(self, artifact_id): + """Sets the artifact_id of this V2beta1Artifact. + + + :param artifact_id: The artifact_id of this V2beta1Artifact. # noqa: E501 + :type artifact_id: str + """ + + self._artifact_id = artifact_id + + @property + def name(self): + """Gets the name of this V2beta1Artifact. # noqa: E501 + + Required. The client provided name of the artifact. Note: in MLMD when name was set, it had to be unique for that type_id this restriction is removed here If this is a \"Metric\" artifact, the name of the metric is treated as the Key in its K/V pair. # noqa: E501 + + :return: The name of this V2beta1Artifact. # noqa: E501 + :rtype: str + """ + return self._name + + @name.setter + def name(self, name): + """Sets the name of this V2beta1Artifact. + + Required. The client provided name of the artifact. Note: in MLMD when name was set, it had to be unique for that type_id this restriction is removed here If this is a \"Metric\" artifact, the name of the metric is treated as the Key in its K/V pair. # noqa: E501 + + :param name: The name of this V2beta1Artifact. # noqa: E501 + :type name: str + """ + + self._name = name + + @property + def description(self): + """Gets the description of this V2beta1Artifact. # noqa: E501 + + + :return: The description of this V2beta1Artifact. # noqa: E501 + :rtype: str + """ + return self._description + + @description.setter + def description(self, description): + """Sets the description of this V2beta1Artifact. + + + :param description: The description of this V2beta1Artifact. 
# noqa: E501 + :type description: str + """ + + self._description = description + + @property + def type(self): + """Gets the type of this V2beta1Artifact. # noqa: E501 + + + :return: The type of this V2beta1Artifact. # noqa: E501 + :rtype: ArtifactArtifactType + """ + return self._type + + @type.setter + def type(self, type): + """Sets the type of this V2beta1Artifact. + + + :param type: The type of this V2beta1Artifact. # noqa: E501 + :type type: ArtifactArtifactType + """ + + self._type = type + + @property + def uri(self): + """Gets the uri of this V2beta1Artifact. # noqa: E501 + + The uniform resource identifier of the physical artifact. May be empty if there is no physical artifact. # noqa: E501 + + :return: The uri of this V2beta1Artifact. # noqa: E501 + :rtype: str + """ + return self._uri + + @uri.setter + def uri(self, uri): + """Sets the uri of this V2beta1Artifact. + + The uniform resource identifier of the physical artifact. May be empty if there is no physical artifact. # noqa: E501 + + :param uri: The uri of this V2beta1Artifact. # noqa: E501 + :type uri: str + """ + + self._uri = uri + + @property + def metadata(self): + """Gets the metadata of this V2beta1Artifact. # noqa: E501 + + Optional. User provided custom properties which are not defined by its type. # noqa: E501 + + :return: The metadata of this V2beta1Artifact. # noqa: E501 + :rtype: dict(str, object) + """ + return self._metadata + + @metadata.setter + def metadata(self, metadata): + """Sets the metadata of this V2beta1Artifact. + + Optional. User provided custom properties which are not defined by its type. # noqa: E501 + + :param metadata: The metadata of this V2beta1Artifact. # noqa: E501 + :type metadata: dict(str, object) + """ + + self._metadata = metadata + + @property + def number_value(self): + """Gets the number_value of this V2beta1Artifact. # noqa: E501 + + + :return: The number_value of this V2beta1Artifact. # noqa: E501 + :rtype: float + """ + return self._number_value + + @number_value.setter + def number_value(self, number_value): + """Sets the number_value of this V2beta1Artifact. + + + :param number_value: The number_value of this V2beta1Artifact. # noqa: E501 + :type number_value: float + """ + + self._number_value = number_value + + @property + def created_at(self): + """Gets the created_at of this V2beta1Artifact. # noqa: E501 + + Output only. Create time of the artifact in millisecond since epoch. Note: The type and name is updated from mlmd artifact to be consistent with other backend apis. # noqa: E501 + + :return: The created_at of this V2beta1Artifact. # noqa: E501 + :rtype: datetime + """ + return self._created_at + + @created_at.setter + def created_at(self, created_at): + """Sets the created_at of this V2beta1Artifact. + + Output only. Create time of the artifact in millisecond since epoch. Note: The type and name is updated from mlmd artifact to be consistent with other backend apis. # noqa: E501 + + :param created_at: The created_at of this V2beta1Artifact. # noqa: E501 + :type created_at: datetime + """ + + self._created_at = created_at + + @property + def namespace(self): + """Gets the namespace of this V2beta1Artifact. # noqa: E501 + + + :return: The namespace of this V2beta1Artifact. # noqa: E501 + :rtype: str + """ + return self._namespace + + @namespace.setter + def namespace(self, namespace): + """Sets the namespace of this V2beta1Artifact. + + + :param namespace: The namespace of this V2beta1Artifact. 
# noqa: E501 + :type namespace: str + """ + + self._namespace = namespace + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, V2beta1Artifact): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, V2beta1Artifact): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_artifact_task.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_artifact_task.py new file mode 100644 index 00000000000..877c71cb255 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_artifact_task.py @@ -0,0 +1,278 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class V2beta1ArtifactTask(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
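# Per the name field's docs above, a "Metric" artifact carries its K/V pair
# as name (the key) plus number_value (the value). The type field (an
# ArtifactArtifactType, defined elsewhere in the client) and output-only
# fields are left unset; all values are illustrative.
from kfp_server_api.models.v2beta1_artifact import V2beta1Artifact

metric = V2beta1Artifact(
    name="accuracy",
    number_value=0.93,
    uri="",                                 # may be empty: no physical artifact backs a metric
    metadata={"split": "validation"},       # user-provided custom properties
    namespace="kubeflow-user-example-com",  # illustrative profile namespace
)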
+ """ + openapi_types = { + 'id': 'str', + 'artifact_id': 'str', + 'run_id': 'str', + 'task_id': 'str', + 'type': 'V2beta1IOType', + 'producer': 'V2beta1IOProducer', + 'key': 'str' + } + + attribute_map = { + 'id': 'id', + 'artifact_id': 'artifact_id', + 'run_id': 'run_id', + 'task_id': 'task_id', + 'type': 'type', + 'producer': 'producer', + 'key': 'key' + } + + def __init__(self, id=None, artifact_id=None, run_id=None, task_id=None, type=None, producer=None, key=None, local_vars_configuration=None): # noqa: E501 + """V2beta1ArtifactTask - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._id = None + self._artifact_id = None + self._run_id = None + self._task_id = None + self._type = None + self._producer = None + self._key = None + self.discriminator = None + + if id is not None: + self.id = id + if artifact_id is not None: + self.artifact_id = artifact_id + if run_id is not None: + self.run_id = run_id + if task_id is not None: + self.task_id = task_id + if type is not None: + self.type = type + if producer is not None: + self.producer = producer + if key is not None: + self.key = key + + @property + def id(self): + """Gets the id of this V2beta1ArtifactTask. # noqa: E501 + + Output only. The unique server generated id of the ArtifactTask. # noqa: E501 + + :return: The id of this V2beta1ArtifactTask. # noqa: E501 + :rtype: str + """ + return self._id + + @id.setter + def id(self, id): + """Sets the id of this V2beta1ArtifactTask. + + Output only. The unique server generated id of the ArtifactTask. # noqa: E501 + + :param id: The id of this V2beta1ArtifactTask. # noqa: E501 + :type id: str + """ + + self._id = id + + @property + def artifact_id(self): + """Gets the artifact_id of this V2beta1ArtifactTask. # noqa: E501 + + + :return: The artifact_id of this V2beta1ArtifactTask. # noqa: E501 + :rtype: str + """ + return self._artifact_id + + @artifact_id.setter + def artifact_id(self, artifact_id): + """Sets the artifact_id of this V2beta1ArtifactTask. + + + :param artifact_id: The artifact_id of this V2beta1ArtifactTask. # noqa: E501 + :type artifact_id: str + """ + + self._artifact_id = artifact_id + + @property + def run_id(self): + """Gets the run_id of this V2beta1ArtifactTask. # noqa: E501 + + + :return: The run_id of this V2beta1ArtifactTask. # noqa: E501 + :rtype: str + """ + return self._run_id + + @run_id.setter + def run_id(self, run_id): + """Sets the run_id of this V2beta1ArtifactTask. + + + :param run_id: The run_id of this V2beta1ArtifactTask. # noqa: E501 + :type run_id: str + """ + + self._run_id = run_id + + @property + def task_id(self): + """Gets the task_id of this V2beta1ArtifactTask. # noqa: E501 + + + :return: The task_id of this V2beta1ArtifactTask. # noqa: E501 + :rtype: str + """ + return self._task_id + + @task_id.setter + def task_id(self, task_id): + """Sets the task_id of this V2beta1ArtifactTask. + + + :param task_id: The task_id of this V2beta1ArtifactTask. # noqa: E501 + :type task_id: str + """ + + self._task_id = task_id + + @property + def type(self): + """Gets the type of this V2beta1ArtifactTask. # noqa: E501 + + + :return: The type of this V2beta1ArtifactTask. # noqa: E501 + :rtype: V2beta1IOType + """ + return self._type + + @type.setter + def type(self, type): + """Sets the type of this V2beta1ArtifactTask. + + + :param type: The type of this V2beta1ArtifactTask. 
# noqa: E501 + :type type: V2beta1IOType + """ + + self._type = type + + @property + def producer(self): + """Gets the producer of this V2beta1ArtifactTask. # noqa: E501 + + + :return: The producer of this V2beta1ArtifactTask. # noqa: E501 + :rtype: V2beta1IOProducer + """ + return self._producer + + @producer.setter + def producer(self, producer): + """Sets the producer of this V2beta1ArtifactTask. + + + :param producer: The producer of this V2beta1ArtifactTask. # noqa: E501 + :type producer: V2beta1IOProducer + """ + + self._producer = producer + + @property + def key(self): + """Gets the key of this V2beta1ArtifactTask. # noqa: E501 + + + :return: The key of this V2beta1ArtifactTask. # noqa: E501 + :rtype: str + """ + return self._key + + @key.setter + def key(self, key): + """Sets the key of this V2beta1ArtifactTask. + + + :param key: The key of this V2beta1ArtifactTask. # noqa: E501 + :type key: str + """ + + self._key = key + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, V2beta1ArtifactTask): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, V2beta1ArtifactTask): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_create_artifact_request.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_create_artifact_request.py new file mode 100644 index 00000000000..37405242552 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_create_artifact_request.py @@ -0,0 +1,254 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class V2beta1CreateArtifactRequest(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
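# Sketch of an artifact-task link using the model above. id is output-only
# (server generated) and the V2beta1IOType enum values are not shown in this
# diff, so both are left unset; ids and the key are illustrative.
# V2beta1IOProducer is the small producer model added later in this diff.
from kfp_server_api.models.v2beta1_artifact_task import V2beta1ArtifactTask
from kfp_server_api.models.v2beta1_io_producer import V2beta1IOProducer

link = V2beta1ArtifactTask(
    artifact_id="1001",
    run_id="run-abc",
    task_id="task-def",
    key="output_model",
    producer=V2beta1IOProducer(task_name="train-op"),
)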
+ """ + openapi_types = { + 'artifact': 'V2beta1Artifact', + 'run_id': 'str', + 'task_id': 'str', + 'producer_key': 'str', + 'iteration_index': 'str', + 'type': 'V2beta1IOType' + } + + attribute_map = { + 'artifact': 'artifact', + 'run_id': 'run_id', + 'task_id': 'task_id', + 'producer_key': 'producer_key', + 'iteration_index': 'iteration_index', + 'type': 'type' + } + + def __init__(self, artifact=None, run_id=None, task_id=None, producer_key=None, iteration_index=None, type=None, local_vars_configuration=None): # noqa: E501 + """V2beta1CreateArtifactRequest - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._artifact = None + self._run_id = None + self._task_id = None + self._producer_key = None + self._iteration_index = None + self._type = None + self.discriminator = None + + if artifact is not None: + self.artifact = artifact + if run_id is not None: + self.run_id = run_id + if task_id is not None: + self.task_id = task_id + if producer_key is not None: + self.producer_key = producer_key + if iteration_index is not None: + self.iteration_index = iteration_index + if type is not None: + self.type = type + + @property + def artifact(self): + """Gets the artifact of this V2beta1CreateArtifactRequest. # noqa: E501 + + + :return: The artifact of this V2beta1CreateArtifactRequest. # noqa: E501 + :rtype: V2beta1Artifact + """ + return self._artifact + + @artifact.setter + def artifact(self, artifact): + """Sets the artifact of this V2beta1CreateArtifactRequest. + + + :param artifact: The artifact of this V2beta1CreateArtifactRequest. # noqa: E501 + :type artifact: V2beta1Artifact + """ + + self._artifact = artifact + + @property + def run_id(self): + """Gets the run_id of this V2beta1CreateArtifactRequest. # noqa: E501 + + An artifact is always created in the context of a run. # noqa: E501 + + :return: The run_id of this V2beta1CreateArtifactRequest. # noqa: E501 + :rtype: str + """ + return self._run_id + + @run_id.setter + def run_id(self, run_id): + """Sets the run_id of this V2beta1CreateArtifactRequest. + + An artifact is always created in the context of a run. # noqa: E501 + + :param run_id: The run_id of this V2beta1CreateArtifactRequest. # noqa: E501 + :type run_id: str + """ + + self._run_id = run_id + + @property + def task_id(self): + """Gets the task_id of this V2beta1CreateArtifactRequest. # noqa: E501 + + The Task that is associated with the creation of this artifact. # noqa: E501 + + :return: The task_id of this V2beta1CreateArtifactRequest. # noqa: E501 + :rtype: str + """ + return self._task_id + + @task_id.setter + def task_id(self, task_id): + """Sets the task_id of this V2beta1CreateArtifactRequest. + + The Task that is associated with the creation of this artifact. # noqa: E501 + + :param task_id: The task_id of this V2beta1CreateArtifactRequest. # noqa: E501 + :type task_id: str + """ + + self._task_id = task_id + + @property + def producer_key(self): + """Gets the producer_key of this V2beta1CreateArtifactRequest. # noqa: E501 + + + :return: The producer_key of this V2beta1CreateArtifactRequest. # noqa: E501 + :rtype: str + """ + return self._producer_key + + @producer_key.setter + def producer_key(self, producer_key): + """Sets the producer_key of this V2beta1CreateArtifactRequest. + + + :param producer_key: The producer_key of this V2beta1CreateArtifactRequest. 
# noqa: E501 + :type producer_key: str + """ + + self._producer_key = producer_key + + @property + def iteration_index(self): + """Gets the iteration_index of this V2beta1CreateArtifactRequest. # noqa: E501 + + + :return: The iteration_index of this V2beta1CreateArtifactRequest. # noqa: E501 + :rtype: str + """ + return self._iteration_index + + @iteration_index.setter + def iteration_index(self, iteration_index): + """Sets the iteration_index of this V2beta1CreateArtifactRequest. + + + :param iteration_index: The iteration_index of this V2beta1CreateArtifactRequest. # noqa: E501 + :type iteration_index: str + """ + + self._iteration_index = iteration_index + + @property + def type(self): + """Gets the type of this V2beta1CreateArtifactRequest. # noqa: E501 + + + :return: The type of this V2beta1CreateArtifactRequest. # noqa: E501 + :rtype: V2beta1IOType + """ + return self._type + + @type.setter + def type(self, type): + """Sets the type of this V2beta1CreateArtifactRequest. + + + :param type: The type of this V2beta1CreateArtifactRequest. # noqa: E501 + :type type: V2beta1IOType + """ + + self._type = type + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, V2beta1CreateArtifactRequest): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, V2beta1CreateArtifactRequest): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_create_artifact_task_request.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_create_artifact_task_request.py new file mode 100644 index 00000000000..f1dc5142e00 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_create_artifact_task_request.py @@ -0,0 +1,120 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class V2beta1CreateArtifactTaskRequest(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
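# Sketch of a single-artifact create request: per the docs above, an
# artifact is always created in the context of a run (run_id), with task_id
# naming the task associated with its creation. Values are illustrative.
from kfp_server_api.models.v2beta1_artifact import V2beta1Artifact
from kfp_server_api.models.v2beta1_create_artifact_request import V2beta1CreateArtifactRequest

req = V2beta1CreateArtifactRequest(
    artifact=V2beta1Artifact(name="model", uri="s3://my-bucket/model"),
    run_id="run-abc",
    task_id="task-def",
    producer_key="model",
)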
+ """ + openapi_types = { + 'artifact_task': 'V2beta1ArtifactTask' + } + + attribute_map = { + 'artifact_task': 'artifact_task' + } + + def __init__(self, artifact_task=None, local_vars_configuration=None): # noqa: E501 + """V2beta1CreateArtifactTaskRequest - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._artifact_task = None + self.discriminator = None + + if artifact_task is not None: + self.artifact_task = artifact_task + + @property + def artifact_task(self): + """Gets the artifact_task of this V2beta1CreateArtifactTaskRequest. # noqa: E501 + + + :return: The artifact_task of this V2beta1CreateArtifactTaskRequest. # noqa: E501 + :rtype: V2beta1ArtifactTask + """ + return self._artifact_task + + @artifact_task.setter + def artifact_task(self, artifact_task): + """Sets the artifact_task of this V2beta1CreateArtifactTaskRequest. + + + :param artifact_task: The artifact_task of this V2beta1CreateArtifactTaskRequest. # noqa: E501 + :type artifact_task: V2beta1ArtifactTask + """ + + self._artifact_task = artifact_task + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, V2beta1CreateArtifactTaskRequest): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, V2beta1CreateArtifactTaskRequest): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_create_artifact_tasks_bulk_request.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_create_artifact_tasks_bulk_request.py new file mode 100644 index 00000000000..68a17db4049 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_create_artifact_tasks_bulk_request.py @@ -0,0 +1,122 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class V2beta1CreateArtifactTasksBulkRequest(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. 
+ attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'artifact_tasks': 'list[V2beta1ArtifactTask]' + } + + attribute_map = { + 'artifact_tasks': 'artifact_tasks' + } + + def __init__(self, artifact_tasks=None, local_vars_configuration=None): # noqa: E501 + """V2beta1CreateArtifactTasksBulkRequest - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._artifact_tasks = None + self.discriminator = None + + if artifact_tasks is not None: + self.artifact_tasks = artifact_tasks + + @property + def artifact_tasks(self): + """Gets the artifact_tasks of this V2beta1CreateArtifactTasksBulkRequest. # noqa: E501 + + Required. The list of artifact-task relationships to create. # noqa: E501 + + :return: The artifact_tasks of this V2beta1CreateArtifactTasksBulkRequest. # noqa: E501 + :rtype: list[V2beta1ArtifactTask] + """ + return self._artifact_tasks + + @artifact_tasks.setter + def artifact_tasks(self, artifact_tasks): + """Sets the artifact_tasks of this V2beta1CreateArtifactTasksBulkRequest. + + Required. The list of artifact-task relationships to create. # noqa: E501 + + :param artifact_tasks: The artifact_tasks of this V2beta1CreateArtifactTasksBulkRequest. # noqa: E501 + :type artifact_tasks: list[V2beta1ArtifactTask] + """ + + self._artifact_tasks = artifact_tasks + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, V2beta1CreateArtifactTasksBulkRequest): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, V2beta1CreateArtifactTasksBulkRequest): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_create_artifact_tasks_bulk_response.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_create_artifact_tasks_bulk_response.py new file mode 100644 index 00000000000..5522aeb2c21 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_create_artifact_tasks_bulk_response.py @@ -0,0 +1,122 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class V2beta1CreateArtifactTasksBulkResponse(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'artifact_tasks': 'list[V2beta1ArtifactTask]' + } + + attribute_map = { + 'artifact_tasks': 'artifact_tasks' + } + + def __init__(self, artifact_tasks=None, local_vars_configuration=None): # noqa: E501 + """V2beta1CreateArtifactTasksBulkResponse - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._artifact_tasks = None + self.discriminator = None + + if artifact_tasks is not None: + self.artifact_tasks = artifact_tasks + + @property + def artifact_tasks(self): + """Gets the artifact_tasks of this V2beta1CreateArtifactTasksBulkResponse. # noqa: E501 + + The list of created artifact-task relationships. # noqa: E501 + + :return: The artifact_tasks of this V2beta1CreateArtifactTasksBulkResponse. # noqa: E501 + :rtype: list[V2beta1ArtifactTask] + """ + return self._artifact_tasks + + @artifact_tasks.setter + def artifact_tasks(self, artifact_tasks): + """Sets the artifact_tasks of this V2beta1CreateArtifactTasksBulkResponse. + + The list of created artifact-task relationships. # noqa: E501 + + :param artifact_tasks: The artifact_tasks of this V2beta1CreateArtifactTasksBulkResponse. 
# noqa: E501 + :type artifact_tasks: list[V2beta1ArtifactTask] + """ + + self._artifact_tasks = artifact_tasks + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, V2beta1CreateArtifactTasksBulkResponse): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, V2beta1CreateArtifactTasksBulkResponse): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_create_artifacts_bulk_request.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_create_artifacts_bulk_request.py new file mode 100644 index 00000000000..82b0a62fae9 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_create_artifacts_bulk_request.py @@ -0,0 +1,122 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class V2beta1CreateArtifactsBulkRequest(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'artifacts': 'list[V2beta1CreateArtifactRequest]' + } + + attribute_map = { + 'artifacts': 'artifacts' + } + + def __init__(self, artifacts=None, local_vars_configuration=None): # noqa: E501 + """V2beta1CreateArtifactsBulkRequest - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._artifacts = None + self.discriminator = None + + if artifacts is not None: + self.artifacts = artifacts + + @property + def artifacts(self): + """Gets the artifacts of this V2beta1CreateArtifactsBulkRequest. # noqa: E501 + + Required. The list of artifacts to create. # noqa: E501 + + :return: The artifacts of this V2beta1CreateArtifactsBulkRequest. # noqa: E501 + :rtype: list[V2beta1CreateArtifactRequest] + """ + return self._artifacts + + @artifacts.setter + def artifacts(self, artifacts): + """Sets the artifacts of this V2beta1CreateArtifactsBulkRequest. + + Required. The list of artifacts to create. 
# noqa: E501 + + :param artifacts: The artifacts of this V2beta1CreateArtifactsBulkRequest. # noqa: E501 + :type artifacts: list[V2beta1CreateArtifactRequest] + """ + + self._artifacts = artifacts + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, V2beta1CreateArtifactsBulkRequest): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, V2beta1CreateArtifactsBulkRequest): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_create_artifacts_bulk_response.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_create_artifacts_bulk_response.py new file mode 100644 index 00000000000..fed8f3fd5e3 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_create_artifacts_bulk_response.py @@ -0,0 +1,122 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class V2beta1CreateArtifactsBulkResponse(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'artifacts': 'list[V2beta1Artifact]' + } + + attribute_map = { + 'artifacts': 'artifacts' + } + + def __init__(self, artifacts=None, local_vars_configuration=None): # noqa: E501 + """V2beta1CreateArtifactsBulkResponse - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._artifacts = None + self.discriminator = None + + if artifacts is not None: + self.artifacts = artifacts + + @property + def artifacts(self): + """Gets the artifacts of this V2beta1CreateArtifactsBulkResponse. # noqa: E501 + + The list of created artifacts. # noqa: E501 + + :return: The artifacts of this V2beta1CreateArtifactsBulkResponse. # noqa: E501 + :rtype: list[V2beta1Artifact] + """ + return self._artifacts + + @artifacts.setter + def artifacts(self, artifacts): + """Sets the artifacts of this V2beta1CreateArtifactsBulkResponse. 
+ + The list of created artifacts. # noqa: E501 + + :param artifacts: The artifacts of this V2beta1CreateArtifactsBulkResponse. # noqa: E501 + :type artifacts: list[V2beta1Artifact] + """ + + self._artifacts = artifacts + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, V2beta1CreateArtifactsBulkResponse): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, V2beta1CreateArtifactsBulkResponse): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_get_run_request_view_mode.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_get_run_request_view_mode.py new file mode 100644 index 00000000000..acd48c3a9aa --- /dev/null +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_get_run_request_view_mode.py @@ -0,0 +1,100 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class V2beta1GetRunRequestViewMode(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + allowed enum values + """ + DEFAULT = "DEFAULT" + FULL = "FULL" + + allowable_values = [DEFAULT, FULL] # noqa: E501 + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + } + + attribute_map = { + } + + def __init__(self, local_vars_configuration=None): # noqa: E501 + """V2beta1GetRunRequestViewMode - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + self.discriminator = None + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, V2beta1GetRunRequestViewMode): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, V2beta1GetRunRequestViewMode): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_io_producer.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_io_producer.py new file mode 100644 index 00000000000..cc046c956de --- /dev/null +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_io_producer.py @@ -0,0 +1,146 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class V2beta1IOProducer(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'task_name': 'str', + 'iteration': 'str' + } + + attribute_map = { + 'task_name': 'task_name', + 'iteration': 'iteration' + } + + def __init__(self, task_name=None, iteration=None, local_vars_configuration=None): # noqa: E501 + """V2beta1IOProducer - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._task_name = None + self._iteration = None + self.discriminator = None + + if task_name is not None: + self.task_name = task_name + if iteration is not None: + self.iteration = iteration + + @property + def task_name(self): + """Gets the task_name of this V2beta1IOProducer. # noqa: E501 + + + :return: The task_name of this V2beta1IOProducer. 
# noqa: E501 + :rtype: str + """ + return self._task_name + + @task_name.setter + def task_name(self, task_name): + """Sets the task_name of this V2beta1IOProducer. + + + :param task_name: The task_name of this V2beta1IOProducer. # noqa: E501 + :type task_name: str + """ + + self._task_name = task_name + + @property + def iteration(self): + """Gets the iteration of this V2beta1IOProducer. # noqa: E501 + + + :return: The iteration of this V2beta1IOProducer. # noqa: E501 + :rtype: str + """ + return self._iteration + + @iteration.setter + def iteration(self, iteration): + """Sets the iteration of this V2beta1IOProducer. + + + :param iteration: The iteration of this V2beta1IOProducer. # noqa: E501 + :type iteration: str + """ + + self._iteration = iteration + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, V2beta1IOProducer): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, V2beta1IOProducer): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_io_type.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_io_type.py new file mode 100644 index 00000000000..5c5ca47fb33 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_io_type.py @@ -0,0 +1,110 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class V2beta1IOType(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
+ """ + + """ + allowed enum values + """ + UNSPECIFIED = "UNSPECIFIED" + COMPONENT_DEFAULT_INPUT = "COMPONENT_DEFAULT_INPUT" + TASK_OUTPUT_INPUT = "TASK_OUTPUT_INPUT" + COMPONENT_INPUT = "COMPONENT_INPUT" + RUNTIME_VALUE_INPUT = "RUNTIME_VALUE_INPUT" + COLLECTED_INPUTS = "COLLECTED_INPUTS" + ITERATOR_INPUT = "ITERATOR_INPUT" + ITERATOR_INPUT_RAW = "ITERATOR_INPUT_RAW" + ITERATOR_OUTPUT = "ITERATOR_OUTPUT" + OUTPUT = "OUTPUT" + ONE_OF_OUTPUT = "ONE_OF_OUTPUT" + TASK_FINAL_STATUS_OUTPUT = "TASK_FINAL_STATUS_OUTPUT" + + allowable_values = [UNSPECIFIED, COMPONENT_DEFAULT_INPUT, TASK_OUTPUT_INPUT, COMPONENT_INPUT, RUNTIME_VALUE_INPUT, COLLECTED_INPUTS, ITERATOR_INPUT, ITERATOR_INPUT_RAW, ITERATOR_OUTPUT, OUTPUT, ONE_OF_OUTPUT, TASK_FINAL_STATUS_OUTPUT] # noqa: E501 + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + } + + attribute_map = { + } + + def __init__(self, local_vars_configuration=None): # noqa: E501 + """V2beta1IOType - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + self.discriminator = None + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, V2beta1IOType): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, V2beta1IOType): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_list_artifact_response.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_list_artifact_response.py new file mode 100644 index 00000000000..5c684744fe1 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_list_artifact_response.py @@ -0,0 +1,178 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class V2beta1ListArtifactResponse(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. 
+ attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'artifacts': 'list[V2beta1Artifact]', + 'total_size': 'int', + 'next_page_token': 'str' + } + + attribute_map = { + 'artifacts': 'artifacts', + 'total_size': 'total_size', + 'next_page_token': 'next_page_token' + } + + def __init__(self, artifacts=None, total_size=None, next_page_token=None, local_vars_configuration=None): # noqa: E501 + """V2beta1ListArtifactResponse - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._artifacts = None + self._total_size = None + self._next_page_token = None + self.discriminator = None + + if artifacts is not None: + self.artifacts = artifacts + if total_size is not None: + self.total_size = total_size + if next_page_token is not None: + self.next_page_token = next_page_token + + @property + def artifacts(self): + """Gets the artifacts of this V2beta1ListArtifactResponse. # noqa: E501 + + The list of artifacts returned. # noqa: E501 + + :return: The artifacts of this V2beta1ListArtifactResponse. # noqa: E501 + :rtype: list[V2beta1Artifact] + """ + return self._artifacts + + @artifacts.setter + def artifacts(self, artifacts): + """Sets the artifacts of this V2beta1ListArtifactResponse. + + The list of artifacts returned. # noqa: E501 + + :param artifacts: The artifacts of this V2beta1ListArtifactResponse. # noqa: E501 + :type artifacts: list[V2beta1Artifact] + """ + + self._artifacts = artifacts + + @property + def total_size(self): + """Gets the total_size of this V2beta1ListArtifactResponse. # noqa: E501 + + The total number of artifacts available. This field is not always populated. # noqa: E501 + + :return: The total_size of this V2beta1ListArtifactResponse. # noqa: E501 + :rtype: int + """ + return self._total_size + + @total_size.setter + def total_size(self, total_size): + """Sets the total_size of this V2beta1ListArtifactResponse. + + The total number of artifacts available. This field is not always populated. # noqa: E501 + + :param total_size: The total_size of this V2beta1ListArtifactResponse. # noqa: E501 + :type total_size: int + """ + + self._total_size = total_size + + @property + def next_page_token(self): + """Gets the next_page_token of this V2beta1ListArtifactResponse. # noqa: E501 + + A token to retrieve the next page of results, or empty if there are no more results in the list. # noqa: E501 + + :return: The next_page_token of this V2beta1ListArtifactResponse. # noqa: E501 + :rtype: str + """ + return self._next_page_token + + @next_page_token.setter + def next_page_token(self, next_page_token): + """Sets the next_page_token of this V2beta1ListArtifactResponse. + + A token to retrieve the next page of results, or empty if there are no more results in the list. # noqa: E501 + + :param next_page_token: The next_page_token of this V2beta1ListArtifactResponse. 
# noqa: E501 + :type next_page_token: str + """ + + self._next_page_token = next_page_token + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, V2beta1ListArtifactResponse): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, V2beta1ListArtifactResponse): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_list_artifact_tasks_response.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_list_artifact_tasks_response.py new file mode 100644 index 00000000000..773dac6f3b7 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_list_artifact_tasks_response.py @@ -0,0 +1,172 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class V2beta1ListArtifactTasksResponse(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'artifact_tasks': 'list[V2beta1ArtifactTask]', + 'total_size': 'int', + 'next_page_token': 'str' + } + + attribute_map = { + 'artifact_tasks': 'artifact_tasks', + 'total_size': 'total_size', + 'next_page_token': 'next_page_token' + } + + def __init__(self, artifact_tasks=None, total_size=None, next_page_token=None, local_vars_configuration=None): # noqa: E501 + """V2beta1ListArtifactTasksResponse - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._artifact_tasks = None + self._total_size = None + self._next_page_token = None + self.discriminator = None + + if artifact_tasks is not None: + self.artifact_tasks = artifact_tasks + if total_size is not None: + self.total_size = total_size + if next_page_token is not None: + self.next_page_token = next_page_token + + @property + def artifact_tasks(self): + """Gets the artifact_tasks of this V2beta1ListArtifactTasksResponse. 
# noqa: E501 + + + :return: The artifact_tasks of this V2beta1ListArtifactTasksResponse. # noqa: E501 + :rtype: list[V2beta1ArtifactTask] + """ + return self._artifact_tasks + + @artifact_tasks.setter + def artifact_tasks(self, artifact_tasks): + """Sets the artifact_tasks of this V2beta1ListArtifactTasksResponse. + + + :param artifact_tasks: The artifact_tasks of this V2beta1ListArtifactTasksResponse. # noqa: E501 + :type artifact_tasks: list[V2beta1ArtifactTask] + """ + + self._artifact_tasks = artifact_tasks + + @property + def total_size(self): + """Gets the total_size of this V2beta1ListArtifactTasksResponse. # noqa: E501 + + + :return: The total_size of this V2beta1ListArtifactTasksResponse. # noqa: E501 + :rtype: int + """ + return self._total_size + + @total_size.setter + def total_size(self, total_size): + """Sets the total_size of this V2beta1ListArtifactTasksResponse. + + + :param total_size: The total_size of this V2beta1ListArtifactTasksResponse. # noqa: E501 + :type total_size: int + """ + + self._total_size = total_size + + @property + def next_page_token(self): + """Gets the next_page_token of this V2beta1ListArtifactTasksResponse. # noqa: E501 + + + :return: The next_page_token of this V2beta1ListArtifactTasksResponse. # noqa: E501 + :rtype: str + """ + return self._next_page_token + + @next_page_token.setter + def next_page_token(self, next_page_token): + """Sets the next_page_token of this V2beta1ListArtifactTasksResponse. + + + :param next_page_token: The next_page_token of this V2beta1ListArtifactTasksResponse. # noqa: E501 + :type next_page_token: str + """ + + self._next_page_token = next_page_token + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, V2beta1ListArtifactTasksResponse): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, V2beta1ListArtifactTasksResponse): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_list_runs_request_view_mode.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_list_runs_request_view_mode.py new file mode 100644 index 00000000000..d0f42b75c44 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_list_runs_request_view_mode.py @@ -0,0 +1,100 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class V2beta1ListRunsRequestViewMode(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + allowed enum values + """ + DEFAULT = "DEFAULT" + FULL = "FULL" + + allowable_values = [DEFAULT, FULL] # noqa: E501 + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + } + + attribute_map = { + } + + def __init__(self, local_vars_configuration=None): # noqa: E501 + """V2beta1ListRunsRequestViewMode - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + self.discriminator = None + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, V2beta1ListRunsRequestViewMode): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, V2beta1ListRunsRequestViewMode): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_list_tasks_response.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_list_tasks_response.py new file mode 100644 index 00000000000..80a6c7d5215 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_list_tasks_response.py @@ -0,0 +1,172 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from kfp_server_api.configuration import Configuration + + +class V2beta1ListTasksResponse(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'tasks': 'list[V2beta1PipelineTaskDetail]', + 'next_page_token': 'str', + 'total_size': 'int' + } + + attribute_map = { + 'tasks': 'tasks', + 'next_page_token': 'next_page_token', + 'total_size': 'total_size' + } + + def __init__(self, tasks=None, next_page_token=None, total_size=None, local_vars_configuration=None): # noqa: E501 + """V2beta1ListTasksResponse - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._tasks = None + self._next_page_token = None + self._total_size = None + self.discriminator = None + + if tasks is not None: + self.tasks = tasks + if next_page_token is not None: + self.next_page_token = next_page_token + if total_size is not None: + self.total_size = total_size + + @property + def tasks(self): + """Gets the tasks of this V2beta1ListTasksResponse. # noqa: E501 + + + :return: The tasks of this V2beta1ListTasksResponse. # noqa: E501 + :rtype: list[V2beta1PipelineTaskDetail] + """ + return self._tasks + + @tasks.setter + def tasks(self, tasks): + """Sets the tasks of this V2beta1ListTasksResponse. + + + :param tasks: The tasks of this V2beta1ListTasksResponse. # noqa: E501 + :type tasks: list[V2beta1PipelineTaskDetail] + """ + + self._tasks = tasks + + @property + def next_page_token(self): + """Gets the next_page_token of this V2beta1ListTasksResponse. # noqa: E501 + + + :return: The next_page_token of this V2beta1ListTasksResponse. # noqa: E501 + :rtype: str + """ + return self._next_page_token + + @next_page_token.setter + def next_page_token(self, next_page_token): + """Sets the next_page_token of this V2beta1ListTasksResponse. + + + :param next_page_token: The next_page_token of this V2beta1ListTasksResponse. # noqa: E501 + :type next_page_token: str + """ + + self._next_page_token = next_page_token + + @property + def total_size(self): + """Gets the total_size of this V2beta1ListTasksResponse. # noqa: E501 + + + :return: The total_size of this V2beta1ListTasksResponse. # noqa: E501 + :rtype: int + """ + return self._total_size + + @total_size.setter + def total_size(self, total_size): + """Sets the total_size of this V2beta1ListTasksResponse. + + + :param total_size: The total_size of this V2beta1ListTasksResponse. 
# noqa: E501 + :type total_size: int + """ + + self._total_size = total_size + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, V2beta1ListTasksResponse): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, V2beta1ListTasksResponse): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_pipeline_task_detail.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_pipeline_task_detail.py index 96f0c79a08d..1052e62448a 100644 --- a/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_pipeline_task_detail.py +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_pipeline_task_detail.py @@ -33,122 +33,163 @@ class V2beta1PipelineTaskDetail(object): and the value is json key in definition. """ openapi_types = { - 'run_id': 'str', - 'task_id': 'str', + 'name': 'str', 'display_name': 'str', + 'task_id': 'str', + 'run_id': 'str', + 'pods': 'list[PipelineTaskDetailTaskPod]', + 'cache_fingerprint': 'str', 'create_time': 'datetime', 'start_time': 'datetime', 'end_time': 'datetime', - 'executor_detail': 'V2beta1PipelineTaskExecutorDetail', - 'state': 'V2beta1RuntimeState', - 'execution_id': 'str', + 'state': 'PipelineTaskDetailTaskState', + 'status_metadata': 'PipelineTaskDetailStatusMetadata', + 'state_history': 'list[PipelineTaskDetailTaskStatus]', + 'type': 'PipelineTaskDetailTaskType', + 'type_attributes': 'PipelineTaskDetailTypeAttributes', 'error': 'GooglerpcStatus', - 'inputs': 'dict(str, V2beta1ArtifactList)', - 'outputs': 'dict(str, V2beta1ArtifactList)', 'parent_task_id': 'str', - 'state_history': 'list[V2beta1RuntimeStatus]', - 'pod_name': 'str', - 'child_tasks': 'list[PipelineTaskDetailChildTask]' + 'child_tasks': 'list[PipelineTaskDetailChildTask]', + 'inputs': 'PipelineTaskDetailInputOutputs', + 'outputs': 'PipelineTaskDetailInputOutputs', + 'scope_path': 'list[str]' } attribute_map = { - 'run_id': 'run_id', - 'task_id': 'task_id', + 'name': 'name', 'display_name': 'display_name', + 'task_id': 'task_id', + 'run_id': 'run_id', + 'pods': 'pods', + 'cache_fingerprint': 'cache_fingerprint', 'create_time': 'create_time', 'start_time': 'start_time', 'end_time': 'end_time', - 'executor_detail': 'executor_detail', 'state': 'state', - 'execution_id': 'execution_id', + 'status_metadata': 'status_metadata', + 'state_history': 'state_history', + 'type': 'type', + 'type_attributes': 'type_attributes', 'error': 'error', + 'parent_task_id': 'parent_task_id', + 'child_tasks': 'child_tasks', 'inputs': 'inputs', 'outputs': 'outputs', - 
'parent_task_id': 'parent_task_id', - 'state_history': 'state_history', - 'pod_name': 'pod_name', - 'child_tasks': 'child_tasks' + 'scope_path': 'scope_path' } - def __init__(self, run_id=None, task_id=None, display_name=None, create_time=None, start_time=None, end_time=None, executor_detail=None, state=None, execution_id=None, error=None, inputs=None, outputs=None, parent_task_id=None, state_history=None, pod_name=None, child_tasks=None, local_vars_configuration=None): # noqa: E501 + def __init__(self, name=None, display_name=None, task_id=None, run_id=None, pods=None, cache_fingerprint=None, create_time=None, start_time=None, end_time=None, state=None, status_metadata=None, state_history=None, type=None, type_attributes=None, error=None, parent_task_id=None, child_tasks=None, inputs=None, outputs=None, scope_path=None, local_vars_configuration=None): # noqa: E501 """V2beta1PipelineTaskDetail - a model defined in OpenAPI""" # noqa: E501 if local_vars_configuration is None: local_vars_configuration = Configuration() self.local_vars_configuration = local_vars_configuration - self._run_id = None - self._task_id = None + self._name = None self._display_name = None + self._task_id = None + self._run_id = None + self._pods = None + self._cache_fingerprint = None self._create_time = None self._start_time = None self._end_time = None - self._executor_detail = None self._state = None - self._execution_id = None + self._status_metadata = None + self._state_history = None + self._type = None + self._type_attributes = None self._error = None - self._inputs = None - self._outputs = None self._parent_task_id = None - self._state_history = None - self._pod_name = None self._child_tasks = None + self._inputs = None + self._outputs = None + self._scope_path = None self.discriminator = None - if run_id is not None: - self.run_id = run_id - if task_id is not None: - self.task_id = task_id + if name is not None: + self.name = name if display_name is not None: self.display_name = display_name + if task_id is not None: + self.task_id = task_id + if run_id is not None: + self.run_id = run_id + if pods is not None: + self.pods = pods + if cache_fingerprint is not None: + self.cache_fingerprint = cache_fingerprint if create_time is not None: self.create_time = create_time if start_time is not None: self.start_time = start_time if end_time is not None: self.end_time = end_time - if executor_detail is not None: - self.executor_detail = executor_detail if state is not None: self.state = state - if execution_id is not None: - self.execution_id = execution_id + if status_metadata is not None: + self.status_metadata = status_metadata + if state_history is not None: + self.state_history = state_history + if type is not None: + self.type = type + if type_attributes is not None: + self.type_attributes = type_attributes if error is not None: self.error = error - if inputs is not None: - self.inputs = inputs - if outputs is not None: - self.outputs = outputs if parent_task_id is not None: self.parent_task_id = parent_task_id - if state_history is not None: - self.state_history = state_history - if pod_name is not None: - self.pod_name = pod_name if child_tasks is not None: self.child_tasks = child_tasks + if inputs is not None: + self.inputs = inputs + if outputs is not None: + self.outputs = outputs + if scope_path is not None: + self.scope_path = scope_path @property - def run_id(self): - """Gets the run_id of this V2beta1PipelineTaskDetail. 
# noqa: E501 + def name(self): + """Gets the name of this V2beta1PipelineTaskDetail. # noqa: E501 - ID of the parent run. # noqa: E501 - :return: The run_id of this V2beta1PipelineTaskDetail. # noqa: E501 + :return: The name of this V2beta1PipelineTaskDetail. # noqa: E501 :rtype: str """ - return self._run_id + return self._name - @run_id.setter - def run_id(self, run_id): - """Sets the run_id of this V2beta1PipelineTaskDetail. + @name.setter + def name(self, name): + """Sets the name of this V2beta1PipelineTaskDetail. - ID of the parent run. # noqa: E501 - :param run_id: The run_id of this V2beta1PipelineTaskDetail. # noqa: E501 - :type run_id: str + :param name: The name of this V2beta1PipelineTaskDetail. # noqa: E501 + :type name: str """ - self._run_id = run_id + self._name = name + + @property + def display_name(self): + """Gets the display_name of this V2beta1PipelineTaskDetail. # noqa: E501 + + User specified name of a task that is defined in [Pipeline.spec][]. # noqa: E501 + + :return: The display_name of this V2beta1PipelineTaskDetail. # noqa: E501 + :rtype: str + """ + return self._display_name + + @display_name.setter + def display_name(self, display_name): + """Sets the display_name of this V2beta1PipelineTaskDetail. + + User specified name of a task that is defined in [Pipeline.spec][]. # noqa: E501 + + :param display_name: The display_name of this V2beta1PipelineTaskDetail. # noqa: E501 + :type display_name: str + """ + + self._display_name = display_name @property def task_id(self): @@ -174,27 +215,69 @@ def task_id(self, task_id): self._task_id = task_id @property - def display_name(self): - """Gets the display_name of this V2beta1PipelineTaskDetail. # noqa: E501 + def run_id(self): + """Gets the run_id of this V2beta1PipelineTaskDetail. # noqa: E501 - User specified name of a task that is defined in [Pipeline.spec][]. # noqa: E501 + ID of the parent run. # noqa: E501 - :return: The display_name of this V2beta1PipelineTaskDetail. # noqa: E501 + :return: The run_id of this V2beta1PipelineTaskDetail. # noqa: E501 :rtype: str """ - return self._display_name + return self._run_id - @display_name.setter - def display_name(self, display_name): - """Sets the display_name of this V2beta1PipelineTaskDetail. + @run_id.setter + def run_id(self, run_id): + """Sets the run_id of this V2beta1PipelineTaskDetail. - User specified name of a task that is defined in [Pipeline.spec][]. # noqa: E501 + ID of the parent run. # noqa: E501 - :param display_name: The display_name of this V2beta1PipelineTaskDetail. # noqa: E501 - :type display_name: str + :param run_id: The run_id of this V2beta1PipelineTaskDetail. # noqa: E501 + :type run_id: str """ - self._display_name = display_name + self._run_id = run_id + + @property + def pods(self): + """Gets the pods of this V2beta1PipelineTaskDetail. # noqa: E501 + + + :return: The pods of this V2beta1PipelineTaskDetail. # noqa: E501 + :rtype: list[PipelineTaskDetailTaskPod] + """ + return self._pods + + @pods.setter + def pods(self, pods): + """Sets the pods of this V2beta1PipelineTaskDetail. + + + :param pods: The pods of this V2beta1PipelineTaskDetail. # noqa: E501 + :type pods: list[PipelineTaskDetailTaskPod] + """ + + self._pods = pods + + @property + def cache_fingerprint(self): + """Gets the cache_fingerprint of this V2beta1PipelineTaskDetail. # noqa: E501 + + + :return: The cache_fingerprint of this V2beta1PipelineTaskDetail. 
# noqa: E501 + :rtype: str + """ + return self._cache_fingerprint + + @cache_fingerprint.setter + def cache_fingerprint(self, cache_fingerprint): + """Sets the cache_fingerprint of this V2beta1PipelineTaskDetail. + + + :param cache_fingerprint: The cache_fingerprint of this V2beta1PipelineTaskDetail. # noqa: E501 + :type cache_fingerprint: str + """ + + self._cache_fingerprint = cache_fingerprint @property def create_time(self): @@ -266,136 +349,132 @@ def end_time(self, end_time): self._end_time = end_time @property - def executor_detail(self): - """Gets the executor_detail of this V2beta1PipelineTaskDetail. # noqa: E501 + def state(self): + """Gets the state of this V2beta1PipelineTaskDetail. # noqa: E501 - :return: The executor_detail of this V2beta1PipelineTaskDetail. # noqa: E501 - :rtype: V2beta1PipelineTaskExecutorDetail + :return: The state of this V2beta1PipelineTaskDetail. # noqa: E501 + :rtype: PipelineTaskDetailTaskState """ - return self._executor_detail + return self._state - @executor_detail.setter - def executor_detail(self, executor_detail): - """Sets the executor_detail of this V2beta1PipelineTaskDetail. + @state.setter + def state(self, state): + """Sets the state of this V2beta1PipelineTaskDetail. - :param executor_detail: The executor_detail of this V2beta1PipelineTaskDetail. # noqa: E501 - :type executor_detail: V2beta1PipelineTaskExecutorDetail + :param state: The state of this V2beta1PipelineTaskDetail. # noqa: E501 + :type state: PipelineTaskDetailTaskState """ - self._executor_detail = executor_detail + self._state = state @property - def state(self): - """Gets the state of this V2beta1PipelineTaskDetail. # noqa: E501 + def status_metadata(self): + """Gets the status_metadata of this V2beta1PipelineTaskDetail. # noqa: E501 - :return: The state of this V2beta1PipelineTaskDetail. # noqa: E501 - :rtype: V2beta1RuntimeState + :return: The status_metadata of this V2beta1PipelineTaskDetail. # noqa: E501 + :rtype: PipelineTaskDetailStatusMetadata """ - return self._state + return self._status_metadata - @state.setter - def state(self, state): - """Sets the state of this V2beta1PipelineTaskDetail. + @status_metadata.setter + def status_metadata(self, status_metadata): + """Sets the status_metadata of this V2beta1PipelineTaskDetail. - :param state: The state of this V2beta1PipelineTaskDetail. # noqa: E501 - :type state: V2beta1RuntimeState + :param status_metadata: The status_metadata of this V2beta1PipelineTaskDetail. # noqa: E501 + :type status_metadata: PipelineTaskDetailStatusMetadata """ - self._state = state + self._status_metadata = status_metadata @property - def execution_id(self): - """Gets the execution_id of this V2beta1PipelineTaskDetail. # noqa: E501 + def state_history(self): + """Gets the state_history of this V2beta1PipelineTaskDetail. # noqa: E501 - Execution id of the corresponding entry in ML metadata store. # noqa: E501 + A sequence of task statuses. This field keeps a record of state transitions. # noqa: E501 - :return: The execution_id of this V2beta1PipelineTaskDetail. # noqa: E501 - :rtype: str + :return: The state_history of this V2beta1PipelineTaskDetail. # noqa: E501 + :rtype: list[PipelineTaskDetailTaskStatus] """ - return self._execution_id + return self._state_history - @execution_id.setter - def execution_id(self, execution_id): - """Sets the execution_id of this V2beta1PipelineTaskDetail. + @state_history.setter + def state_history(self, state_history): + """Sets the state_history of this V2beta1PipelineTaskDetail. 
- Execution id of the corresponding entry in ML metadata store. # noqa: E501 + A sequence of task statuses. This field keeps a record of state transitions. # noqa: E501 - :param execution_id: The execution_id of this V2beta1PipelineTaskDetail. # noqa: E501 - :type execution_id: str + :param state_history: The state_history of this V2beta1PipelineTaskDetail. # noqa: E501 + :type state_history: list[PipelineTaskDetailTaskStatus] """ - self._execution_id = execution_id + self._state_history = state_history @property - def error(self): - """Gets the error of this V2beta1PipelineTaskDetail. # noqa: E501 + def type(self): + """Gets the type of this V2beta1PipelineTaskDetail. # noqa: E501 - :return: The error of this V2beta1PipelineTaskDetail. # noqa: E501 - :rtype: GooglerpcStatus + :return: The type of this V2beta1PipelineTaskDetail. # noqa: E501 + :rtype: PipelineTaskDetailTaskType """ - return self._error + return self._type - @error.setter - def error(self, error): - """Sets the error of this V2beta1PipelineTaskDetail. + @type.setter + def type(self, type): + """Sets the type of this V2beta1PipelineTaskDetail. - :param error: The error of this V2beta1PipelineTaskDetail. # noqa: E501 - :type error: GooglerpcStatus + :param type: The type of this V2beta1PipelineTaskDetail. # noqa: E501 + :type type: PipelineTaskDetailTaskType """ - self._error = error + self._type = type @property - def inputs(self): - """Gets the inputs of this V2beta1PipelineTaskDetail. # noqa: E501 + def type_attributes(self): + """Gets the type_attributes of this V2beta1PipelineTaskDetail. # noqa: E501 - Input artifacts of the task. # noqa: E501 - :return: The inputs of this V2beta1PipelineTaskDetail. # noqa: E501 - :rtype: dict(str, V2beta1ArtifactList) + :return: The type_attributes of this V2beta1PipelineTaskDetail. # noqa: E501 + :rtype: PipelineTaskDetailTypeAttributes """ - return self._inputs + return self._type_attributes - @inputs.setter - def inputs(self, inputs): - """Sets the inputs of this V2beta1PipelineTaskDetail. + @type_attributes.setter + def type_attributes(self, type_attributes): + """Sets the type_attributes of this V2beta1PipelineTaskDetail. - Input artifacts of the task. # noqa: E501 - :param inputs: The inputs of this V2beta1PipelineTaskDetail. # noqa: E501 - :type inputs: dict(str, V2beta1ArtifactList) + :param type_attributes: The type_attributes of this V2beta1PipelineTaskDetail. # noqa: E501 + :type type_attributes: PipelineTaskDetailTypeAttributes """ - self._inputs = inputs + self._type_attributes = type_attributes @property - def outputs(self): - """Gets the outputs of this V2beta1PipelineTaskDetail. # noqa: E501 + def error(self): + """Gets the error of this V2beta1PipelineTaskDetail. # noqa: E501 - Output artifacts of the task. # noqa: E501 - :return: The outputs of this V2beta1PipelineTaskDetail. # noqa: E501 - :rtype: dict(str, V2beta1ArtifactList) + :return: The error of this V2beta1PipelineTaskDetail. # noqa: E501 + :rtype: GooglerpcStatus """ - return self._outputs + return self._error - @outputs.setter - def outputs(self, outputs): - """Sets the outputs of this V2beta1PipelineTaskDetail. + @error.setter + def error(self, error): + """Sets the error of this V2beta1PipelineTaskDetail. - Output artifacts of the task. # noqa: E501 - :param outputs: The outputs of this V2beta1PipelineTaskDetail. # noqa: E501 - :type outputs: dict(str, V2beta1ArtifactList) + :param error: The error of this V2beta1PipelineTaskDetail. 
# noqa: E501 + :type error: GooglerpcStatus """ - self._outputs = outputs + self._error = error @property def parent_task_id(self): @@ -421,73 +500,92 @@ def parent_task_id(self, parent_task_id): self._parent_task_id = parent_task_id @property - def state_history(self): - """Gets the state_history of this V2beta1PipelineTaskDetail. # noqa: E501 + def child_tasks(self): + """Gets the child_tasks of this V2beta1PipelineTaskDetail. # noqa: E501 - A sequence of task statuses. This field keeps a record of state transitions. # noqa: E501 + Sequence of dependent tasks. # noqa: E501 - :return: The state_history of this V2beta1PipelineTaskDetail. # noqa: E501 - :rtype: list[V2beta1RuntimeStatus] + :return: The child_tasks of this V2beta1PipelineTaskDetail. # noqa: E501 + :rtype: list[PipelineTaskDetailChildTask] """ - return self._state_history + return self._child_tasks - @state_history.setter - def state_history(self, state_history): - """Sets the state_history of this V2beta1PipelineTaskDetail. + @child_tasks.setter + def child_tasks(self, child_tasks): + """Sets the child_tasks of this V2beta1PipelineTaskDetail. - A sequence of task statuses. This field keeps a record of state transitions. # noqa: E501 + Sequence of dependent tasks. # noqa: E501 - :param state_history: The state_history of this V2beta1PipelineTaskDetail. # noqa: E501 - :type state_history: list[V2beta1RuntimeStatus] + :param child_tasks: The child_tasks of this V2beta1PipelineTaskDetail. # noqa: E501 + :type child_tasks: list[PipelineTaskDetailChildTask] """ - self._state_history = state_history + self._child_tasks = child_tasks @property - def pod_name(self): - """Gets the pod_name of this V2beta1PipelineTaskDetail. # noqa: E501 + def inputs(self): + """Gets the inputs of this V2beta1PipelineTaskDetail. # noqa: E501 - Name of the corresponding pod assigned by the orchestration engine. Also known as node_id. # noqa: E501 - :return: The pod_name of this V2beta1PipelineTaskDetail. # noqa: E501 - :rtype: str + :return: The inputs of this V2beta1PipelineTaskDetail. # noqa: E501 + :rtype: PipelineTaskDetailInputOutputs """ - return self._pod_name + return self._inputs - @pod_name.setter - def pod_name(self, pod_name): - """Sets the pod_name of this V2beta1PipelineTaskDetail. + @inputs.setter + def inputs(self, inputs): + """Sets the inputs of this V2beta1PipelineTaskDetail. - Name of the corresponding pod assigned by the orchestration engine. Also known as node_id. # noqa: E501 - :param pod_name: The pod_name of this V2beta1PipelineTaskDetail. # noqa: E501 - :type pod_name: str + :param inputs: The inputs of this V2beta1PipelineTaskDetail. # noqa: E501 + :type inputs: PipelineTaskDetailInputOutputs """ - self._pod_name = pod_name + self._inputs = inputs @property - def child_tasks(self): - """Gets the child_tasks of this V2beta1PipelineTaskDetail. # noqa: E501 + def outputs(self): + """Gets the outputs of this V2beta1PipelineTaskDetail. # noqa: E501 - Sequence of dependen tasks. # noqa: E501 - :return: The child_tasks of this V2beta1PipelineTaskDetail. # noqa: E501 - :rtype: list[PipelineTaskDetailChildTask] + :return: The outputs of this V2beta1PipelineTaskDetail. # noqa: E501 + :rtype: PipelineTaskDetailInputOutputs """ - return self._child_tasks + return self._outputs - @child_tasks.setter - def child_tasks(self, child_tasks): - """Sets the child_tasks of this V2beta1PipelineTaskDetail. + @outputs.setter + def outputs(self, outputs): + """Sets the outputs of this V2beta1PipelineTaskDetail. - Sequence of dependen tasks. 
# noqa: E501 - :param child_tasks: The child_tasks of this V2beta1PipelineTaskDetail. # noqa: E501 - :type child_tasks: list[PipelineTaskDetailChildTask] + :param outputs: The outputs of this V2beta1PipelineTaskDetail. # noqa: E501 + :type outputs: PipelineTaskDetailInputOutputs """ - self._child_tasks = child_tasks + self._outputs = outputs + + @property + def scope_path(self): + """Gets the scope_path of this V2beta1PipelineTaskDetail. # noqa: E501 + + The scope of this task within the pipeline spec. Each entry represents either a Dag Task or a Container task. Note that Container tasks will always be the last entry in a scope_path. # noqa: E501 + + :return: The scope_path of this V2beta1PipelineTaskDetail. # noqa: E501 + :rtype: list[str] + """ + return self._scope_path + + @scope_path.setter + def scope_path(self, scope_path): + """Sets the scope_path of this V2beta1PipelineTaskDetail. + + The scope of this task within the pipeline spec. Each entry represents either a Dag Task or a Container task. Note that Container tasks will always be the last entry in a scope_path. # noqa: E501 + + :param scope_path: The scope_path of this V2beta1PipelineTaskDetail. # noqa: E501 + :type scope_path: list[str] + """ + + self._scope_path = scope_path def to_dict(self): """Returns the model properties as a dict""" diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_pipeline_task_executor_detail.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_pipeline_task_executor_detail.py deleted file mode 100644 index 806d45cc94b..00000000000 --- a/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_pipeline_task_executor_detail.py +++ /dev/null @@ -1,206 +0,0 @@ -# coding: utf-8 - -""" - Kubeflow Pipelines API - - This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. - - Contact: kubeflow-pipelines@google.com - Generated by: https://openapi-generator.tech -""" - - -import pprint -import re # noqa: F401 - -import six - -from kfp_server_api.configuration import Configuration - - -class V2beta1PipelineTaskExecutorDetail(object): - """NOTE: This class is auto generated by OpenAPI Generator. - Ref: https://openapi-generator.tech - - Do not edit the class manually. - """ - - """ - Attributes: - openapi_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition.
- """ - openapi_types = { - 'main_job': 'str', - 'pre_caching_check_job': 'str', - 'failed_main_jobs': 'list[str]', - 'failed_pre_caching_check_jobs': 'list[str]' - } - - attribute_map = { - 'main_job': 'main_job', - 'pre_caching_check_job': 'pre_caching_check_job', - 'failed_main_jobs': 'failed_main_jobs', - 'failed_pre_caching_check_jobs': 'failed_pre_caching_check_jobs' - } - - def __init__(self, main_job=None, pre_caching_check_job=None, failed_main_jobs=None, failed_pre_caching_check_jobs=None, local_vars_configuration=None): # noqa: E501 - """V2beta1PipelineTaskExecutorDetail - a model defined in OpenAPI""" # noqa: E501 - if local_vars_configuration is None: - local_vars_configuration = Configuration() - self.local_vars_configuration = local_vars_configuration - - self._main_job = None - self._pre_caching_check_job = None - self._failed_main_jobs = None - self._failed_pre_caching_check_jobs = None - self.discriminator = None - - if main_job is not None: - self.main_job = main_job - if pre_caching_check_job is not None: - self.pre_caching_check_job = pre_caching_check_job - if failed_main_jobs is not None: - self.failed_main_jobs = failed_main_jobs - if failed_pre_caching_check_jobs is not None: - self.failed_pre_caching_check_jobs = failed_pre_caching_check_jobs - - @property - def main_job(self): - """Gets the main_job of this V2beta1PipelineTaskExecutorDetail. # noqa: E501 - - The name of the job for the main container execution. # noqa: E501 - - :return: The main_job of this V2beta1PipelineTaskExecutorDetail. # noqa: E501 - :rtype: str - """ - return self._main_job - - @main_job.setter - def main_job(self, main_job): - """Sets the main_job of this V2beta1PipelineTaskExecutorDetail. - - The name of the job for the main container execution. # noqa: E501 - - :param main_job: The main_job of this V2beta1PipelineTaskExecutorDetail. # noqa: E501 - :type main_job: str - """ - - self._main_job = main_job - - @property - def pre_caching_check_job(self): - """Gets the pre_caching_check_job of this V2beta1PipelineTaskExecutorDetail. # noqa: E501 - - The name of the job for the pre-caching-check container execution. This job will be available if the Run.pipeline_spec specifies the `pre_caching_check` hook in the lifecycle events. # noqa: E501 - - :return: The pre_caching_check_job of this V2beta1PipelineTaskExecutorDetail. # noqa: E501 - :rtype: str - """ - return self._pre_caching_check_job - - @pre_caching_check_job.setter - def pre_caching_check_job(self, pre_caching_check_job): - """Sets the pre_caching_check_job of this V2beta1PipelineTaskExecutorDetail. - - The name of the job for the pre-caching-check container execution. This job will be available if the Run.pipeline_spec specifies the `pre_caching_check` hook in the lifecycle events. # noqa: E501 - - :param pre_caching_check_job: The pre_caching_check_job of this V2beta1PipelineTaskExecutorDetail. # noqa: E501 - :type pre_caching_check_job: str - """ - - self._pre_caching_check_job = pre_caching_check_job - - @property - def failed_main_jobs(self): - """Gets the failed_main_jobs of this V2beta1PipelineTaskExecutorDetail. # noqa: E501 - - The names of the previously failed job for the main container executions. The list includes the all attempts in chronological order. # noqa: E501 - - :return: The failed_main_jobs of this V2beta1PipelineTaskExecutorDetail. 
# noqa: E501 - :rtype: list[str] - """ - return self._failed_main_jobs - - @failed_main_jobs.setter - def failed_main_jobs(self, failed_main_jobs): - """Sets the failed_main_jobs of this V2beta1PipelineTaskExecutorDetail. - - The names of the previously failed job for the main container executions. The list includes the all attempts in chronological order. # noqa: E501 - - :param failed_main_jobs: The failed_main_jobs of this V2beta1PipelineTaskExecutorDetail. # noqa: E501 - :type failed_main_jobs: list[str] - """ - - self._failed_main_jobs = failed_main_jobs - - @property - def failed_pre_caching_check_jobs(self): - """Gets the failed_pre_caching_check_jobs of this V2beta1PipelineTaskExecutorDetail. # noqa: E501 - - The names of the previously failed job for the pre-caching-check container executions. This job will be available if the Run.pipeline_spec specifies the `pre_caching_check` hook in the lifecycle events. The list includes the all attempts in chronological order. # noqa: E501 - - :return: The failed_pre_caching_check_jobs of this V2beta1PipelineTaskExecutorDetail. # noqa: E501 - :rtype: list[str] - """ - return self._failed_pre_caching_check_jobs - - @failed_pre_caching_check_jobs.setter - def failed_pre_caching_check_jobs(self, failed_pre_caching_check_jobs): - """Sets the failed_pre_caching_check_jobs of this V2beta1PipelineTaskExecutorDetail. - - The names of the previously failed job for the pre-caching-check container executions. This job will be available if the Run.pipeline_spec specifies the `pre_caching_check` hook in the lifecycle events. The list includes the all attempts in chronological order. # noqa: E501 - - :param failed_pre_caching_check_jobs: The failed_pre_caching_check_jobs of this V2beta1PipelineTaskExecutorDetail. # noqa: E501 - :type failed_pre_caching_check_jobs: list[str] - """ - - self._failed_pre_caching_check_jobs = failed_pre_caching_check_jobs - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.openapi_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, V2beta1PipelineTaskExecutorDetail): - return False - - return self.to_dict() == other.to_dict() - - def __ne__(self, other): - """Returns true if both objects are not equal""" - if not isinstance(other, V2beta1PipelineTaskExecutorDetail): - return True - - return self.to_dict() != other.to_dict() diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_run.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_run.py index 5f7c81436b7..21660ae3d9f 100644 --- a/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_run.py +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_run.py @@ -50,7 +50,10 @@ class V2beta1Run(object): 'error': 'GooglerpcStatus', 'run_details': 
'V2beta1RunDetails', 'recurring_run_id': 'str', - 'state_history': 'list[V2beta1RuntimeStatus]' + 'state_history': 'list[V2beta1RuntimeStatus]', + 'pipeline_reference': 'V2beta1PipelineVersionReference', + 'task_count': 'int', + 'tasks': 'list[V2beta1PipelineTaskDetail]' } attribute_map = { @@ -71,10 +74,13 @@ class V2beta1Run(object): 'error': 'error', 'run_details': 'run_details', 'recurring_run_id': 'recurring_run_id', - 'state_history': 'state_history' + 'state_history': 'state_history', + 'pipeline_reference': 'pipeline_reference', + 'task_count': 'task_count', + 'tasks': 'tasks' } - def __init__(self, experiment_id=None, run_id=None, display_name=None, storage_state=None, description=None, pipeline_version_id=None, pipeline_spec=None, pipeline_version_reference=None, runtime_config=None, service_account=None, created_at=None, scheduled_at=None, finished_at=None, state=None, error=None, run_details=None, recurring_run_id=None, state_history=None, local_vars_configuration=None): # noqa: E501 + def __init__(self, experiment_id=None, run_id=None, display_name=None, storage_state=None, description=None, pipeline_version_id=None, pipeline_spec=None, pipeline_version_reference=None, runtime_config=None, service_account=None, created_at=None, scheduled_at=None, finished_at=None, state=None, error=None, run_details=None, recurring_run_id=None, state_history=None, pipeline_reference=None, task_count=None, tasks=None, local_vars_configuration=None): # noqa: E501 """V2beta1Run - a model defined in OpenAPI""" # noqa: E501 if local_vars_configuration is None: local_vars_configuration = Configuration() @@ -98,6 +104,9 @@ def __init__(self, experiment_id=None, run_id=None, display_name=None, storage_s self._run_details = None self._recurring_run_id = None self._state_history = None + self._pipeline_reference = None + self._task_count = None + self._tasks = None self.discriminator = None if experiment_id is not None: @@ -136,6 +145,12 @@ def __init__(self, experiment_id=None, run_id=None, display_name=None, storage_s self.recurring_run_id = recurring_run_id if state_history is not None: self.state_history = state_history + if pipeline_reference is not None: + self.pipeline_reference = pipeline_reference + if task_count is not None: + self.task_count = task_count + if tasks is not None: + self.tasks = tasks @property def experiment_id(self): @@ -539,6 +554,69 @@ def state_history(self, state_history): self._state_history = state_history + @property + def pipeline_reference(self): + """Gets the pipeline_reference of this V2beta1Run. # noqa: E501 + + + :return: The pipeline_reference of this V2beta1Run. # noqa: E501 + :rtype: V2beta1PipelineVersionReference + """ + return self._pipeline_reference + + @pipeline_reference.setter + def pipeline_reference(self, pipeline_reference): + """Sets the pipeline_reference of this V2beta1Run. + + + :param pipeline_reference: The pipeline_reference of this V2beta1Run. # noqa: E501 + :type pipeline_reference: V2beta1PipelineVersionReference + """ + + self._pipeline_reference = pipeline_reference + + @property + def task_count(self): + """Gets the task_count of this V2beta1Run. # noqa: E501 + + + :return: The task_count of this V2beta1Run. # noqa: E501 + :rtype: int + """ + return self._task_count + + @task_count.setter + def task_count(self, task_count): + """Sets the task_count of this V2beta1Run. + + + :param task_count: The task_count of this V2beta1Run. 
# noqa: E501 + :type task_count: int + """ + + self._task_count = task_count + + @property + def tasks(self): + """Gets the tasks of this V2beta1Run. # noqa: E501 + + + :return: The tasks of this V2beta1Run. # noqa: E501 + :rtype: list[V2beta1PipelineTaskDetail] + """ + return self._tasks + + @tasks.setter + def tasks(self, tasks): + """Sets the tasks of this V2beta1Run. + + + :param tasks: The tasks of this V2beta1Run. # noqa: E501 + :type tasks: list[V2beta1PipelineTaskDetail] + """ + + self._tasks = tasks + def to_dict(self): """Returns the model properties as a dict""" result = {} diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_artifact_list.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_update_tasks_bulk_request.py similarity index 68% rename from backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_artifact_list.py rename to backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_update_tasks_bulk_request.py index d3970594be7..c3e66cec3f5 100644 --- a/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_artifact_list.py +++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_update_tasks_bulk_request.py @@ -18,7 +18,7 @@ from kfp_server_api.configuration import Configuration -class V2beta1ArtifactList(object): +class V2beta1UpdateTasksBulkRequest(object): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -33,47 +33,45 @@ class V2beta1ArtifactList(object): and the value is json key in definition. """ openapi_types = { - 'artifact_ids': 'list[str]' + 'tasks': 'dict(str, V2beta1PipelineTaskDetail)' } attribute_map = { - 'artifact_ids': 'artifact_ids' + 'tasks': 'tasks' } - def __init__(self, artifact_ids=None, local_vars_configuration=None): # noqa: E501 - """V2beta1ArtifactList - a model defined in OpenAPI""" # noqa: E501 + def __init__(self, tasks=None, local_vars_configuration=None): # noqa: E501 + """V2beta1UpdateTasksBulkRequest - a model defined in OpenAPI""" # noqa: E501 if local_vars_configuration is None: local_vars_configuration = Configuration() self.local_vars_configuration = local_vars_configuration - self._artifact_ids = None + self._tasks = None self.discriminator = None - if artifact_ids is not None: - self.artifact_ids = artifact_ids + if tasks is not None: + self.tasks = tasks @property - def artifact_ids(self): - """Gets the artifact_ids of this V2beta1ArtifactList. # noqa: E501 + def tasks(self): + """Gets the tasks of this V2beta1UpdateTasksBulkRequest. # noqa: E501 - A list of artifact metadata ids. # noqa: E501 - :return: The artifact_ids of this V2beta1ArtifactList. # noqa: E501 - :rtype: list[str] + :return: The tasks of this V2beta1UpdateTasksBulkRequest. # noqa: E501 + :rtype: dict(str, V2beta1PipelineTaskDetail) """ - return self._artifact_ids + return self._tasks - @artifact_ids.setter - def artifact_ids(self, artifact_ids): - """Sets the artifact_ids of this V2beta1ArtifactList. + @tasks.setter + def tasks(self, tasks): + """Sets the tasks of this V2beta1UpdateTasksBulkRequest. - A list of artifact metadata ids. # noqa: E501 - :param artifact_ids: The artifact_ids of this V2beta1ArtifactList. # noqa: E501 - :type artifact_ids: list[str] + :param tasks: The tasks of this V2beta1UpdateTasksBulkRequest. 
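For orientation, reading the three fields added to V2beta1Run above might look like the following minimal sketch. It assumes a reachable KFP API server and the package-level re-exports that are usual for this generated client; the host and run ID are placeholders, not values from this change.

    import kfp_server_api

    # Hypothetical endpoint; point this at a real KFP API server.
    configuration = kfp_server_api.Configuration(host="http://localhost:8888")

    with kfp_server_api.ApiClient(configuration) as api_client:
        run_api = kfp_server_api.RunServiceApi(api_client)
        run = run_api.run_service_get_run(run_id="my-run-id")  # placeholder run ID

        # Fields introduced by this change; all are optional and may be None.
        print(run.task_count)          # int
        print(run.pipeline_reference)  # V2beta1PipelineVersionReference or None
        for task in run.tasks or []:   # list[V2beta1PipelineTaskDetail]
            print(task.to_dict())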
diff --git a/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_artifact_list.py b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_update_tasks_bulk_request.py
similarity index 68%
rename from backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_artifact_list.py
rename to backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_update_tasks_bulk_request.py
index d3970594be7..c3e66cec3f5 100644
--- a/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_artifact_list.py
+++ b/backend/api/v2beta1/python_http_client/kfp_server_api/models/v2beta1_update_tasks_bulk_request.py
@@ -18,7 +18,7 @@
 from kfp_server_api.configuration import Configuration
 
 
-class V2beta1ArtifactList(object):
+class V2beta1UpdateTasksBulkRequest(object):
     """NOTE: This class is auto generated by OpenAPI Generator.
     Ref: https://openapi-generator.tech
 
@@ -33,47 +33,45 @@ class V2beta1ArtifactList(object):
                             and the value is json key in definition.
     """
     openapi_types = {
-        'artifact_ids': 'list[str]'
+        'tasks': 'dict(str, V2beta1PipelineTaskDetail)'
     }
 
     attribute_map = {
-        'artifact_ids': 'artifact_ids'
+        'tasks': 'tasks'
     }
 
-    def __init__(self, artifact_ids=None, local_vars_configuration=None):  # noqa: E501
-        """V2beta1ArtifactList - a model defined in OpenAPI"""  # noqa: E501
+    def __init__(self, tasks=None, local_vars_configuration=None):  # noqa: E501
+        """V2beta1UpdateTasksBulkRequest - a model defined in OpenAPI"""  # noqa: E501
         if local_vars_configuration is None:
             local_vars_configuration = Configuration()
         self.local_vars_configuration = local_vars_configuration
 
-        self._artifact_ids = None
+        self._tasks = None
         self.discriminator = None
 
-        if artifact_ids is not None:
-            self.artifact_ids = artifact_ids
+        if tasks is not None:
+            self.tasks = tasks
 
     @property
-    def artifact_ids(self):
-        """Gets the artifact_ids of this V2beta1ArtifactList.  # noqa: E501
+    def tasks(self):
+        """Gets the tasks of this V2beta1UpdateTasksBulkRequest.  # noqa: E501
 
-        A list of artifact metadata ids.  # noqa: E501
 
-        :return: The artifact_ids of this V2beta1ArtifactList.  # noqa: E501
-        :rtype: list[str]
+        :return: The tasks of this V2beta1UpdateTasksBulkRequest.  # noqa: E501
+        :rtype: dict(str, V2beta1PipelineTaskDetail)
         """
-        return self._artifact_ids
+        return self._tasks
 
-    @artifact_ids.setter
-    def artifact_ids(self, artifact_ids):
-        """Sets the artifact_ids of this V2beta1ArtifactList.
+    @tasks.setter
+    def tasks(self, tasks):
+        """Sets the tasks of this V2beta1UpdateTasksBulkRequest.
 
-        A list of artifact metadata ids.  # noqa: E501
 
-        :param artifact_ids: The artifact_ids of this V2beta1ArtifactList.  # noqa: E501
-        :type artifact_ids: list[str]
+        :param tasks: The tasks of this V2beta1UpdateTasksBulkRequest.  # noqa: E501
+        :type tasks: dict(str, V2beta1PipelineTaskDetail)
         """
 
-        self._artifact_ids = artifact_ids
+        self._tasks = tasks
 
     def to_dict(self):
         """Returns the model properties as a dict"""
@@ -109,14 +107,14 @@ def __repr__(self):
 
     def __eq__(self, other):
         """Returns true if both objects are equal"""
-        if not isinstance(other, V2beta1ArtifactList):
+        if not isinstance(other, V2beta1UpdateTasksBulkRequest):
             return False
 
         return self.to_dict() == other.to_dict()
 
     def __ne__(self, other):
         """Returns true if both objects are not equal"""
-        if not isinstance(other, V2beta1ArtifactList):
+        if not isinstance(other, V2beta1UpdateTasksBulkRequest):
             return True
 
         return self.to_dict() != other.to_dict()
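Constructing the renamed request model might look like the sketch below. It assumes the model is re-exported at the package root (usual for this client); the task ID and the empty task detail are placeholders.

    import kfp_server_api

    # Placeholder detail; in practice this would carry real state/status fields.
    detail = kfp_server_api.V2beta1PipelineTaskDetail()

    body = kfp_server_api.V2beta1UpdateTasksBulkRequest(
        tasks={"task-id-1": detail}  # dict(str, V2beta1PipelineTaskDetail), presumably keyed by task ID
    )
    print(body.to_dict())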
# noqa: E501 + :type tasks: dict(str, V2beta1PipelineTaskDetail) + """ + + self._tasks = tasks + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, V2beta1UpdateTasksBulkResponse): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, V2beta1UpdateTasksBulkResponse): + return True + + return self.to_dict() != other.to_dict() diff --git a/backend/api/v2beta1/python_http_client/test/test_v2beta1_artifact_list.py b/backend/api/v2beta1/python_http_client/test/test_artifact_artifact_type.py similarity index 63% rename from backend/api/v2beta1/python_http_client/test/test_v2beta1_artifact_list.py rename to backend/api/v2beta1/python_http_client/test/test_artifact_artifact_type.py index b07f471dfb3..1e78feab912 100644 --- a/backend/api/v2beta1/python_http_client/test/test_v2beta1_artifact_list.py +++ b/backend/api/v2beta1/python_http_client/test/test_artifact_artifact_type.py @@ -16,11 +16,11 @@ import datetime import kfp_server_api -from kfp_server_api.models.v2beta1_artifact_list import V2beta1ArtifactList # noqa: E501 +from kfp_server_api.models.artifact_artifact_type import ArtifactArtifactType # noqa: E501 from kfp_server_api.rest import ApiException -class TestV2beta1ArtifactList(unittest.TestCase): - """V2beta1ArtifactList unit test stubs""" +class TestArtifactArtifactType(unittest.TestCase): + """ArtifactArtifactType unit test stubs""" def setUp(self): pass @@ -29,23 +29,20 @@ def tearDown(self): pass def make_instance(self, include_optional): - """Test V2beta1ArtifactList + """Test ArtifactArtifactType include_option is a boolean, when False only required params are included, when True both required and optional params are included """ - # model = kfp_server_api.models.v2beta1_artifact_list.V2beta1ArtifactList() # noqa: E501 + # model = kfp_server_api.models.artifact_artifact_type.ArtifactArtifactType() # noqa: E501 if include_optional : - return V2beta1ArtifactList( - artifact_ids = [ - '0' - ] + return ArtifactArtifactType( ) else : - return V2beta1ArtifactList( + return ArtifactArtifactType( ) - def testV2beta1ArtifactList(self): - """Test V2beta1ArtifactList""" + def testArtifactArtifactType(self): + """Test ArtifactArtifactType""" inst_req_only = self.make_instance(include_optional=False) inst_req_and_optional = self.make_instance(include_optional=True) diff --git a/backend/api/v2beta1/python_http_client/test/test_artifact_service_api.py b/backend/api/v2beta1/python_http_client/test/test_artifact_service_api.py new file mode 100644 index 00000000000..ae328f7cc74 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/test/test_artifact_service_api.py @@ -0,0 
+# coding: utf-8
+
+"""
+    Kubeflow Pipelines API
+
+    This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition.
+
+    Contact: kubeflow-pipelines@google.com
+    Generated by: https://openapi-generator.tech
+"""
+
+
+from __future__ import absolute_import
+
+import unittest
+
+import kfp_server_api
+from kfp_server_api.api.artifact_service_api import ArtifactServiceApi  # noqa: E501
+from kfp_server_api.rest import ApiException
+
+
+class TestArtifactServiceApi(unittest.TestCase):
+    """ArtifactServiceApi unit test stubs"""
+
+    def setUp(self):
+        self.api = kfp_server_api.api.artifact_service_api.ArtifactServiceApi()  # noqa: E501
+
+    def tearDown(self):
+        pass
+
+    def test_batch_create_artifact_tasks(self):
+        """Test case for batch_create_artifact_tasks
+
+        Creates multiple artifact-task relationships in bulk.  # noqa: E501
+        """
+        pass
+
+    def test_batch_create_artifacts(self):
+        """Test case for batch_create_artifacts
+
+        Creates multiple artifacts in bulk.  # noqa: E501
+        """
+        pass
+
+    def test_create_artifact(self):
+        """Test case for create_artifact
+
+        Creates a new artifact.  # noqa: E501
+        """
+        pass
+
+    def test_create_artifact_task(self):
+        """Test case for create_artifact_task
+
+        Creates an artifact-task relationship.  # noqa: E501
+        """
+        pass
+
+    def test_get_artifact(self):
+        """Test case for get_artifact
+
+        Finds a specific Artifact by ID.  # noqa: E501
+        """
+        pass
+
+    def test_list_artifact_tasks(self):
+        """Test case for list_artifact_tasks
+
+        Lists artifact-task relationships.  # noqa: E501
+        """
+        pass
+
+    def test_list_artifacts(self):
+        """Test case for list_artifacts
+
+        Finds all artifacts within the specified namespace.  # noqa: E501
+        """
+        pass
+
+
+if __name__ == '__main__':
+    unittest.main()
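Only the method names in the stubs above come from the generated API; a hedged sketch of exercising them follows, in which the host, the namespace filter, and all parameter names are assumptions.

    import kfp_server_api

    configuration = kfp_server_api.Configuration(host="http://localhost:8888")  # assumed host
    with kfp_server_api.ApiClient(configuration) as api_client:
        artifact_api = kfp_server_api.ArtifactServiceApi(api_client)

        # List artifacts in a namespace (the parameter name is an assumption):
        resp = artifact_api.list_artifacts(namespace="kubeflow")
        for artifact in resp.artifacts or []:
            print(artifact.artifact_id, artifact.uri)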
diff --git a/backend/api/v2beta1/python_http_client/test/test_input_outputs_io_artifact.py b/backend/api/v2beta1/python_http_client/test/test_input_outputs_io_artifact.py
new file mode 100644
index 00000000000..735fb55094f
--- /dev/null
+++ b/backend/api/v2beta1/python_http_client/test/test_input_outputs_io_artifact.py
@@ -0,0 +1,70 @@
+# coding: utf-8
+
+"""
+    Kubeflow Pipelines API
+
+    This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition.
+
+    Contact: kubeflow-pipelines@google.com
+    Generated by: https://openapi-generator.tech
+"""
+
+
+from __future__ import absolute_import
+
+import unittest
+import datetime
+
+import kfp_server_api
+from kfp_server_api.models.input_outputs_io_artifact import InputOutputsIOArtifact  # noqa: E501
+from kfp_server_api.rest import ApiException
+
+class TestInputOutputsIOArtifact(unittest.TestCase):
+    """InputOutputsIOArtifact unit test stubs"""
+
+    def setUp(self):
+        pass
+
+    def tearDown(self):
+        pass
+
+    def make_instance(self, include_optional):
+        """Test InputOutputsIOArtifact
+            include_option is a boolean, when False only required
+            params are included, when True both required and
+            optional params are included """
+        # model = kfp_server_api.models.input_outputs_io_artifact.InputOutputsIOArtifact()  # noqa: E501
+        if include_optional :
+            return InputOutputsIOArtifact(
+                artifacts = [
+                    kfp_server_api.models.not_to_be_confused_with_runtime_artifact_in_pipeline_spec.Not to be confused with RuntimeArtifact in PipelineSpec(
+                        artifact_id = '0',
+                        name = '0',
+                        description = '0',
+                        type = 'TYPE_UNSPECIFIED',
+                        uri = '0',
+                        metadata = {
+                            'key' : None
+                            },
+                        number_value = 1.337,
+                        created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
+                        namespace = '0', )
+                    ],
+                type = 'UNSPECIFIED',
+                artifact_key = '0',
+                producer = kfp_server_api.models.v2beta1_io_producer.v2beta1IOProducer(
+                    task_name = '0',
+                    iteration = '0', )
+            )
+        else :
+            return InputOutputsIOArtifact(
+        )
+
+    def testInputOutputsIOArtifact(self):
+        """Test InputOutputsIOArtifact"""
+        inst_req_only = self.make_instance(include_optional=False)
+        inst_req_and_optional = self.make_instance(include_optional=True)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/backend/api/v2beta1/python_http_client/test/test_pipeline_task_detail_child_task.py b/backend/api/v2beta1/python_http_client/test/test_pipeline_task_detail_child_task.py
index 1203c9bbe64..41baec0cde0 100644
--- a/backend/api/v2beta1/python_http_client/test/test_pipeline_task_detail_child_task.py
+++ b/backend/api/v2beta1/python_http_client/test/test_pipeline_task_detail_child_task.py
@@ -37,7 +37,7 @@ def make_instance(self, include_optional):
         if include_optional :
             return PipelineTaskDetailChildTask(
                 task_id = '0',
-                pod_name = '0'
+                name = '0'
             )
         else :
             return PipelineTaskDetailChildTask(
diff --git a/backend/api/v2beta1/python_http_client/test/test_pipeline_task_detail_input_outputs.py b/backend/api/v2beta1/python_http_client/test/test_pipeline_task_detail_input_outputs.py
new file mode 100644
index 00000000000..677380b5c96
--- /dev/null
+++ b/backend/api/v2beta1/python_http_client/test/test_pipeline_task_detail_input_outputs.py
@@ -0,0 +1,82 @@
+# coding: utf-8
+
+"""
+    Kubeflow Pipelines API
+
+    This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition.
+
+    Contact: kubeflow-pipelines@google.com
+    Generated by: https://openapi-generator.tech
+"""
+
+
+from __future__ import absolute_import
+
+import unittest
+import datetime
+
+import kfp_server_api
+from kfp_server_api.models.pipeline_task_detail_input_outputs import PipelineTaskDetailInputOutputs  # noqa: E501
+from kfp_server_api.rest import ApiException
+
+class TestPipelineTaskDetailInputOutputs(unittest.TestCase):
+    """PipelineTaskDetailInputOutputs unit test stubs"""
+
+    def setUp(self):
+        pass
+
+    def tearDown(self):
+        pass
+
+    def make_instance(self, include_optional):
+        """Test PipelineTaskDetailInputOutputs
+            include_option is a boolean, when False only required
+            params are included, when True both required and
+            optional params are included """
+        # model = kfp_server_api.models.pipeline_task_detail_input_outputs.PipelineTaskDetailInputOutputs()  # noqa: E501
+        if include_optional :
+            return PipelineTaskDetailInputOutputs(
+                parameters = [
+                    kfp_server_api.models.input_outputs_io_parameter.InputOutputsIOParameter(
+                        value = kfp_server_api.models.value.value(),
+                        type = 'UNSPECIFIED',
+                        parameter_key = '0',
+                        producer = kfp_server_api.models.v2beta1_io_producer.v2beta1IOProducer(
+                            task_name = '0',
+                            iteration = '0', ), )
+                    ],
+                artifacts = [
+                    kfp_server_api.models.align_structure_with_executor_input.Align structure with Executor Input(
+                        artifacts = [
+                            kfp_server_api.models.not_to_be_confused_with_runtime_artifact_in_pipeline_spec.Not to be confused with RuntimeArtifact in PipelineSpec(
+                                artifact_id = '0',
+                                name = '0',
+                                description = '0',
+                                type = 'TYPE_UNSPECIFIED',
+                                uri = '0',
+                                metadata = {
+                                    'key' : None
+                                    },
+                                number_value = 1.337,
+                                created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
+                                namespace = '0', )
+                            ],
+                        type = 'UNSPECIFIED',
+                        artifact_key = '0',
+                        producer = kfp_server_api.models.v2beta1_io_producer.v2beta1IOProducer(
+                            task_name = '0',
+                            iteration = '0', ), )
+                    ]
+            )
+        else :
+            return PipelineTaskDetailInputOutputs(
+        )
+
+    def testPipelineTaskDetailInputOutputs(self):
+        """Test PipelineTaskDetailInputOutputs"""
+        inst_req_only = self.make_instance(include_optional=False)
+        inst_req_and_optional = self.make_instance(include_optional=True)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/backend/api/v2beta1/python_http_client/test/test_v2beta1_pipeline_task_executor_detail.py b/backend/api/v2beta1/python_http_client/test/test_pipeline_task_detail_status_metadata.py
similarity index 51%
rename from backend/api/v2beta1/python_http_client/test/test_v2beta1_pipeline_task_executor_detail.py
rename to backend/api/v2beta1/python_http_client/test/test_pipeline_task_detail_status_metadata.py
index 6716008a21e..f86b3b7f265 100644
--- a/backend/api/v2beta1/python_http_client/test/test_v2beta1_pipeline_task_executor_detail.py
+++ b/backend/api/v2beta1/python_http_client/test/test_pipeline_task_detail_status_metadata.py
@@ -16,11 +16,11 @@
 import datetime
 
 import kfp_server_api
-from kfp_server_api.models.v2beta1_pipeline_task_executor_detail import V2beta1PipelineTaskExecutorDetail  # noqa: E501
+from kfp_server_api.models.pipeline_task_detail_status_metadata import PipelineTaskDetailStatusMetadata  # noqa: E501
 from kfp_server_api.rest import ApiException
 
-class TestV2beta1PipelineTaskExecutorDetail(unittest.TestCase):
-    """V2beta1PipelineTaskExecutorDetail unit test stubs"""
+class TestPipelineTaskDetailStatusMetadata(unittest.TestCase):
+    """PipelineTaskDetailStatusMetadata unit test stubs"""
 
     def setUp(self):
         pass
@@ -29,28 +29,24 @@ def tearDown(self):
         pass
 
     def make_instance(self, include_optional):
-        """Test V2beta1PipelineTaskExecutorDetail
+        """Test PipelineTaskDetailStatusMetadata
            include_option is a boolean, when False only required
            params are included, when True both required and
            optional params are included """
-        # model = kfp_server_api.models.v2beta1_pipeline_task_executor_detail.V2beta1PipelineTaskExecutorDetail()  # noqa: E501
+        # model = kfp_server_api.models.pipeline_task_detail_status_metadata.PipelineTaskDetailStatusMetadata()  # noqa: E501
         if include_optional :
-            return V2beta1PipelineTaskExecutorDetail(
-                main_job = '0',
-                pre_caching_check_job = '0',
-                failed_main_jobs = [
-                    '0'
-                    ],
-                failed_pre_caching_check_jobs = [
-                    '0'
-                    ]
+            return PipelineTaskDetailStatusMetadata(
+                message = '0',
+                custom_properties = {
+                    'key' : None
+                    }
             )
         else :
-            return V2beta1PipelineTaskExecutorDetail(
+            return PipelineTaskDetailStatusMetadata(
         )
 
-    def testV2beta1PipelineTaskExecutorDetail(self):
-        """Test V2beta1PipelineTaskExecutorDetail"""
+    def testPipelineTaskDetailStatusMetadata(self):
+        """Test PipelineTaskDetailStatusMetadata"""
         inst_req_only = self.make_instance(include_optional=False)
         inst_req_and_optional = self.make_instance(include_optional=True)
diff --git a/backend/api/v2beta1/python_http_client/test/test_pipeline_task_detail_task_pod.py b/backend/api/v2beta1/python_http_client/test/test_pipeline_task_detail_task_pod.py
new file mode 100644
index 00000000000..4d1d247ab5a
--- /dev/null
+++ b/backend/api/v2beta1/python_http_client/test/test_pipeline_task_detail_task_pod.py
@@ -0,0 +1,54 @@
+# coding: utf-8
+
+"""
+    Kubeflow Pipelines API
+
+    This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition.
+
+    Contact: kubeflow-pipelines@google.com
+    Generated by: https://openapi-generator.tech
+"""
+
+
+from __future__ import absolute_import
+
+import unittest
+import datetime
+
+import kfp_server_api
+from kfp_server_api.models.pipeline_task_detail_task_pod import PipelineTaskDetailTaskPod  # noqa: E501
+from kfp_server_api.rest import ApiException
+
+class TestPipelineTaskDetailTaskPod(unittest.TestCase):
+    """PipelineTaskDetailTaskPod unit test stubs"""
+
+    def setUp(self):
+        pass
+
+    def tearDown(self):
+        pass
+
+    def make_instance(self, include_optional):
+        """Test PipelineTaskDetailTaskPod
+            include_option is a boolean, when False only required
+            params are included, when True both required and
+            optional params are included """
+        # model = kfp_server_api.models.pipeline_task_detail_task_pod.PipelineTaskDetailTaskPod()  # noqa: E501
+        if include_optional :
+            return PipelineTaskDetailTaskPod(
+                name = '0',
+                uid = '0',
+                type = 'UNSPECIFIED'
+            )
+        else :
+            return PipelineTaskDetailTaskPod(
+        )
+
+    def testPipelineTaskDetailTaskPod(self):
+        """Test PipelineTaskDetailTaskPod"""
+        inst_req_only = self.make_instance(include_optional=False)
+        inst_req_and_optional = self.make_instance(include_optional=True)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/backend/api/v2beta1/python_http_client/test/test_pipeline_task_detail_task_pod_type.py b/backend/api/v2beta1/python_http_client/test/test_pipeline_task_detail_task_pod_type.py
new file mode 100644
index 00000000000..a53d411bc60
--- /dev/null
+++ b/backend/api/v2beta1/python_http_client/test/test_pipeline_task_detail_task_pod_type.py
@@ -0,0 +1,51 @@
+# coding: utf-8
+
+"""
+    Kubeflow Pipelines API
+
+    This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition.
+
+    Contact: kubeflow-pipelines@google.com
+    Generated by: https://openapi-generator.tech
+"""
+
+
+from __future__ import absolute_import
+
+import unittest
+import datetime
+
+import kfp_server_api
+from kfp_server_api.models.pipeline_task_detail_task_pod_type import PipelineTaskDetailTaskPodType  # noqa: E501
+from kfp_server_api.rest import ApiException
+
+class TestPipelineTaskDetailTaskPodType(unittest.TestCase):
+    """PipelineTaskDetailTaskPodType unit test stubs"""
+
+    def setUp(self):
+        pass
+
+    def tearDown(self):
+        pass
+
+    def make_instance(self, include_optional):
+        """Test PipelineTaskDetailTaskPodType
+            include_option is a boolean, when False only required
+            params are included, when True both required and
+            optional params are included """
+        # model = kfp_server_api.models.pipeline_task_detail_task_pod_type.PipelineTaskDetailTaskPodType()  # noqa: E501
+        if include_optional :
+            return PipelineTaskDetailTaskPodType(
+            )
+        else :
+            return PipelineTaskDetailTaskPodType(
+        )
+
+    def testPipelineTaskDetailTaskPodType(self):
+        """Test PipelineTaskDetailTaskPodType"""
+        inst_req_only = self.make_instance(include_optional=False)
+        inst_req_and_optional = self.make_instance(include_optional=True)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/backend/api/v2beta1/python_http_client/test/test_pipeline_task_detail_task_state.py b/backend/api/v2beta1/python_http_client/test/test_pipeline_task_detail_task_state.py
new file mode 100644
index 00000000000..81f15414dd7
--- /dev/null
+++ b/backend/api/v2beta1/python_http_client/test/test_pipeline_task_detail_task_state.py
@@ -0,0 +1,51 @@
+# coding: utf-8
+
+"""
+    Kubeflow Pipelines API
+
+    This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition.
+
+    Contact: kubeflow-pipelines@google.com
+    Generated by: https://openapi-generator.tech
+"""
+
+
+from __future__ import absolute_import
+
+import unittest
+import datetime
+
+import kfp_server_api
+from kfp_server_api.models.pipeline_task_detail_task_state import PipelineTaskDetailTaskState  # noqa: E501
+from kfp_server_api.rest import ApiException
+
+class TestPipelineTaskDetailTaskState(unittest.TestCase):
+    """PipelineTaskDetailTaskState unit test stubs"""
+
+    def setUp(self):
+        pass
+
+    def tearDown(self):
+        pass
+
+    def make_instance(self, include_optional):
+        """Test PipelineTaskDetailTaskState
+            include_option is a boolean, when False only required
+            params are included, when True both required and
+            optional params are included """
+        # model = kfp_server_api.models.pipeline_task_detail_task_state.PipelineTaskDetailTaskState()  # noqa: E501
+        if include_optional :
+            return PipelineTaskDetailTaskState(
+            )
+        else :
+            return PipelineTaskDetailTaskState(
+        )
+
+    def testPipelineTaskDetailTaskState(self):
+        """Test PipelineTaskDetailTaskState"""
+        inst_req_only = self.make_instance(include_optional=False)
+        inst_req_and_optional = self.make_instance(include_optional=True)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/backend/api/v2beta1/python_http_client/test/test_pipeline_task_detail_task_status.py b/backend/api/v2beta1/python_http_client/test/test_pipeline_task_detail_task_status.py
new file mode 100644
index 00000000000..e81a35f5674
--- /dev/null
+++ b/backend/api/v2beta1/python_http_client/test/test_pipeline_task_detail_task_status.py
@@ -0,0 +1,61 @@
+# coding: utf-8
+
+"""
+    Kubeflow Pipelines API
+
+    This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition.
+
+    Contact: kubeflow-pipelines@google.com
+    Generated by: https://openapi-generator.tech
+"""
+
+
+from __future__ import absolute_import
+
+import unittest
+import datetime
+
+import kfp_server_api
+from kfp_server_api.models.pipeline_task_detail_task_status import PipelineTaskDetailTaskStatus  # noqa: E501
+from kfp_server_api.rest import ApiException
+
+class TestPipelineTaskDetailTaskStatus(unittest.TestCase):
+    """PipelineTaskDetailTaskStatus unit test stubs"""
+
+    def setUp(self):
+        pass
+
+    def tearDown(self):
+        pass
+
+    def make_instance(self, include_optional):
+        """Test PipelineTaskDetailTaskStatus
+            include_option is a boolean, when False only required
+            params are included, when True both required and
+            optional params are included """
+        # model = kfp_server_api.models.pipeline_task_detail_task_status.PipelineTaskDetailTaskStatus()  # noqa: E501
+        if include_optional :
+            return PipelineTaskDetailTaskStatus(
+                update_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
+                state = 'RUNTIME_STATE_UNSPECIFIED',
+                error = kfp_server_api.models.googlerpc_status.googlerpcStatus(
+                    code = 56,
+                    message = '0',
+                    details = [
+                        {
+                            'key' : None
+                            }
+                        ], )
+            )
+        else :
+            return PipelineTaskDetailTaskStatus(
+        )
+
+    def testPipelineTaskDetailTaskStatus(self):
+        """Test PipelineTaskDetailTaskStatus"""
+        inst_req_only = self.make_instance(include_optional=False)
+        inst_req_and_optional = self.make_instance(include_optional=True)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/backend/api/v2beta1/python_http_client/test/test_pipeline_task_detail_task_type.py b/backend/api/v2beta1/python_http_client/test/test_pipeline_task_detail_task_type.py
new file mode 100644
index 00000000000..e537c572f7a
--- /dev/null
+++ b/backend/api/v2beta1/python_http_client/test/test_pipeline_task_detail_task_type.py
@@ -0,0 +1,51 @@
+# coding: utf-8
+
+"""
+    Kubeflow Pipelines API
+
+    This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition.
+
+    Contact: kubeflow-pipelines@google.com
+    Generated by: https://openapi-generator.tech
+"""
+
+
+from __future__ import absolute_import
+
+import unittest
+import datetime
+
+import kfp_server_api
+from kfp_server_api.models.pipeline_task_detail_task_type import PipelineTaskDetailTaskType  # noqa: E501
+from kfp_server_api.rest import ApiException
+
+class TestPipelineTaskDetailTaskType(unittest.TestCase):
+    """PipelineTaskDetailTaskType unit test stubs"""
+
+    def setUp(self):
+        pass
+
+    def tearDown(self):
+        pass
+
+    def make_instance(self, include_optional):
+        """Test PipelineTaskDetailTaskType
+            include_option is a boolean, when False only required
+            params are included, when True both required and
+            optional params are included """
+        # model = kfp_server_api.models.pipeline_task_detail_task_type.PipelineTaskDetailTaskType()  # noqa: E501
+        if include_optional :
+            return PipelineTaskDetailTaskType(
+            )
+        else :
+            return PipelineTaskDetailTaskType(
+        )
+
+    def testPipelineTaskDetailTaskType(self):
+        """Test PipelineTaskDetailTaskType"""
+        inst_req_only = self.make_instance(include_optional=False)
+        inst_req_and_optional = self.make_instance(include_optional=True)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/backend/api/v2beta1/python_http_client/test/test_pipeline_task_detail_type_attributes.py b/backend/api/v2beta1/python_http_client/test/test_pipeline_task_detail_type_attributes.py
new file mode 100644
index 00000000000..40cb7b1e255
--- /dev/null
+++ b/backend/api/v2beta1/python_http_client/test/test_pipeline_task_detail_type_attributes.py
@@ -0,0 +1,53 @@
+# coding: utf-8
+
+"""
+    Kubeflow Pipelines API
+
+    This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition.
+
+    Contact: kubeflow-pipelines@google.com
+    Generated by: https://openapi-generator.tech
+"""
+
+
+from __future__ import absolute_import
+
+import unittest
+import datetime
+
+import kfp_server_api
+from kfp_server_api.models.pipeline_task_detail_type_attributes import PipelineTaskDetailTypeAttributes  # noqa: E501
+from kfp_server_api.rest import ApiException
+
+class TestPipelineTaskDetailTypeAttributes(unittest.TestCase):
+    """PipelineTaskDetailTypeAttributes unit test stubs"""
+
+    def setUp(self):
+        pass
+
+    def tearDown(self):
+        pass
+
+    def make_instance(self, include_optional):
+        """Test PipelineTaskDetailTypeAttributes
+            include_option is a boolean, when False only required
+            params are included, when True both required and
+            optional params are included """
+        # model = kfp_server_api.models.pipeline_task_detail_type_attributes.PipelineTaskDetailTypeAttributes()  # noqa: E501
+        if include_optional :
+            return PipelineTaskDetailTypeAttributes(
+                iteration_index = '0',
+                iteration_count = '0'
+            )
+        else :
+            return PipelineTaskDetailTypeAttributes(
+        )
+
+    def testPipelineTaskDetailTypeAttributes(self):
+        """Test PipelineTaskDetailTypeAttributes"""
+        inst_req_only = self.make_instance(include_optional=False)
+        inst_req_and_optional = self.make_instance(include_optional=True)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/backend/api/v2beta1/python_http_client/test/test_run_service_api.py b/backend/api/v2beta1/python_http_client/test/test_run_service_api.py
index f9737e87fb5..82c3aaaefb3 100644
--- a/backend/api/v2beta1/python_http_client/test/test_run_service_api.py
+++ b/backend/api/v2beta1/python_http_client/test/test_run_service_api.py
@@ -28,6 +28,34 @@ def setUp(self):
 
     def tearDown(self):
         pass
 
+    def test_batch_update_tasks(self):
+        """Test case for batch_update_tasks
+
+        Updates multiple tasks in bulk.  # noqa: E501
+        """
+        pass
+
+    def test_create_task(self):
+        """Test case for create_task
+
+        Creates a new task.  # noqa: E501
+        """
+        pass
+
+    def test_get_task(self):
+        """Test case for get_task
+
+        Gets a specific task by ID.  # noqa: E501
+        """
+        pass
+
+    def test_list_tasks(self):
+        """Test case for list_tasks
+
+        Lists tasks with optional filtering.  # noqa: E501
+        """
+        pass
+
     def test_run_service_archive_run(self):
         """Test case for run_service_archive_run
@@ -91,6 +119,13 @@ def test_run_service_unarchive_run(self):
         """
         pass
 
+    def test_update_task(self):
+        """Test case for update_task
+
+        Updates an existing task.  # noqa: E501
+        """
+        pass
+
 
 if __name__ == '__main__':
     unittest.main()
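The new task CRUD stubs above map onto calls of roughly the following shape. The method names come from the stubs; everything else (host, IDs, parameter names, the update payload) is an assumption, not part of the generated contract.

    import kfp_server_api

    configuration = kfp_server_api.Configuration(host="http://localhost:8888")  # assumed host
    with kfp_server_api.ApiClient(configuration) as api_client:
        run_api = kfp_server_api.RunServiceApi(api_client)

        task = run_api.get_task(task_id="task-id")     # fetch one task (assumed parameter name)
        tasks = run_api.list_tasks(run_id="run-id")    # optional filtering (assumed parameter name)

        # Bulk update using the request model introduced earlier in this diff:
        run_api.batch_update_tasks(
            body=kfp_server_api.V2beta1UpdateTasksBulkRequest(tasks={"task-id": task})
        )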
diff --git a/backend/api/v2beta1/python_http_client/test/test_v2beta1_artifact.py b/backend/api/v2beta1/python_http_client/test/test_v2beta1_artifact.py
new file mode 100644
index 00000000000..a9312da090e
--- /dev/null
+++ b/backend/api/v2beta1/python_http_client/test/test_v2beta1_artifact.py
@@ -0,0 +1,62 @@
+# coding: utf-8
+
+"""
+    Kubeflow Pipelines API
+
+    This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition.
+
+    Contact: kubeflow-pipelines@google.com
+    Generated by: https://openapi-generator.tech
+"""
+
+
+from __future__ import absolute_import
+
+import unittest
+import datetime
+
+import kfp_server_api
+from kfp_server_api.models.v2beta1_artifact import V2beta1Artifact  # noqa: E501
+from kfp_server_api.rest import ApiException
+
+class TestV2beta1Artifact(unittest.TestCase):
+    """V2beta1Artifact unit test stubs"""
+
+    def setUp(self):
+        pass
+
+    def tearDown(self):
+        pass
+
+    def make_instance(self, include_optional):
+        """Test V2beta1Artifact
+            include_option is a boolean, when False only required
+            params are included, when True both required and
+            optional params are included """
+        # model = kfp_server_api.models.v2beta1_artifact.V2beta1Artifact()  # noqa: E501
+        if include_optional :
+            return V2beta1Artifact(
+                artifact_id = '0',
+                name = '0',
+                description = '0',
+                type = 'TYPE_UNSPECIFIED',
+                uri = '0',
+                metadata = {
+                    'key' : None
+                    },
+                number_value = 1.337,
+                created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
+                namespace = '0'
+            )
+        else :
+            return V2beta1Artifact(
+        )
+
+    def testV2beta1Artifact(self):
+        """Test V2beta1Artifact"""
+        inst_req_only = self.make_instance(include_optional=False)
+        inst_req_and_optional = self.make_instance(include_optional=True)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/backend/api/v2beta1/python_http_client/test/test_v2beta1_artifact_task.py b/backend/api/v2beta1/python_http_client/test/test_v2beta1_artifact_task.py
new file mode 100644
index 00000000000..8092fd73021
--- /dev/null
+++ b/backend/api/v2beta1/python_http_client/test/test_v2beta1_artifact_task.py
@@ -0,0 +1,60 @@
+# coding: utf-8
+
+"""
+    Kubeflow Pipelines API
+
+    This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition.
+
+    Contact: kubeflow-pipelines@google.com
+    Generated by: https://openapi-generator.tech
+"""
+
+
+from __future__ import absolute_import
+
+import unittest
+import datetime
+
+import kfp_server_api
+from kfp_server_api.models.v2beta1_artifact_task import V2beta1ArtifactTask  # noqa: E501
+from kfp_server_api.rest import ApiException
+
+class TestV2beta1ArtifactTask(unittest.TestCase):
+    """V2beta1ArtifactTask unit test stubs"""
+
+    def setUp(self):
+        pass
+
+    def tearDown(self):
+        pass
+
+    def make_instance(self, include_optional):
+        """Test V2beta1ArtifactTask
+            include_option is a boolean, when False only required
+            params are included, when True both required and
+            optional params are included """
+        # model = kfp_server_api.models.v2beta1_artifact_task.V2beta1ArtifactTask()  # noqa: E501
+        if include_optional :
+            return V2beta1ArtifactTask(
+                id = '0',
+                artifact_id = '0',
+                run_id = '0',
+                task_id = '0',
+                type = 'UNSPECIFIED',
+                producer = kfp_server_api.models.v2beta1_io_producer.v2beta1IOProducer(
+                    task_name = '0',
+                    iteration = '0', ),
+                key = '0'
+            )
+        else :
+            return V2beta1ArtifactTask(
+        )
+
+    def testV2beta1ArtifactTask(self):
+        """Test V2beta1ArtifactTask"""
+        inst_req_only = self.make_instance(include_optional=False)
+        inst_req_and_optional = self.make_instance(include_optional=True)
+
+
+if __name__ == '__main__':
+    unittest.main()
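Building an artifact-task link with these models might look like the sketch below; every ID is a placeholder, the 'UNSPECIFIED' type is simply the default shown in the stub above, and package-root imports are assumed.

    import kfp_server_api

    # Placeholder IDs; in practice these come from an existing run and artifact.
    link = kfp_server_api.V2beta1ArtifactTask(
        artifact_id="artifact-id",
        run_id="run-id",
        task_id="task-id",
        type="UNSPECIFIED",  # V2beta1IOType default from the stub
        key="output-key",
    )
    request = kfp_server_api.V2beta1CreateArtifactTaskRequest(artifact_task=link)
    print(request.to_dict())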
diff --git a/backend/api/v2beta1/python_http_client/test/test_v2beta1_create_artifact_request.py b/backend/api/v2beta1/python_http_client/test/test_v2beta1_create_artifact_request.py
new file mode 100644
index 00000000000..45c041480f3
--- /dev/null
+++ b/backend/api/v2beta1/python_http_client/test/test_v2beta1_create_artifact_request.py
@@ -0,0 +1,68 @@
+# coding: utf-8
+
+"""
+    Kubeflow Pipelines API
+
+    This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition.
+
+    Contact: kubeflow-pipelines@google.com
+    Generated by: https://openapi-generator.tech
+"""
+
+
+from __future__ import absolute_import
+
+import unittest
+import datetime
+
+import kfp_server_api
+from kfp_server_api.models.v2beta1_create_artifact_request import V2beta1CreateArtifactRequest  # noqa: E501
+from kfp_server_api.rest import ApiException
+
+class TestV2beta1CreateArtifactRequest(unittest.TestCase):
+    """V2beta1CreateArtifactRequest unit test stubs"""
+
+    def setUp(self):
+        pass
+
+    def tearDown(self):
+        pass
+
+    def make_instance(self, include_optional):
+        """Test V2beta1CreateArtifactRequest
+            include_option is a boolean, when False only required
+            params are included, when True both required and
+            optional params are included """
+        # model = kfp_server_api.models.v2beta1_create_artifact_request.V2beta1CreateArtifactRequest()  # noqa: E501
+        if include_optional :
+            return V2beta1CreateArtifactRequest(
+                artifact = kfp_server_api.models.not_to_be_confused_with_runtime_artifact_in_pipeline_spec.Not to be confused with RuntimeArtifact in PipelineSpec(
+                    artifact_id = '0',
+                    name = '0',
+                    description = '0',
+                    type = 'TYPE_UNSPECIFIED',
+                    uri = '0',
+                    metadata = {
+                        'key' : None
+                        },
+                    number_value = 1.337,
+                    created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
+                    namespace = '0', ),
+                run_id = '0',
+                task_id = '0',
+                producer_key = '0',
+                iteration_index = '0',
+                type = 'UNSPECIFIED'
+            )
+        else :
+            return V2beta1CreateArtifactRequest(
+        )
+
+    def testV2beta1CreateArtifactRequest(self):
+        """Test V2beta1CreateArtifactRequest"""
+        inst_req_only = self.make_instance(include_optional=False)
+        inst_req_and_optional = self.make_instance(include_optional=True)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/backend/api/v2beta1/python_http_client/test/test_v2beta1_create_artifact_task_request.py b/backend/api/v2beta1/python_http_client/test/test_v2beta1_create_artifact_task_request.py
new file mode 100644
index 00000000000..4e4a55e4a2e
--- /dev/null
+++ b/backend/api/v2beta1/python_http_client/test/test_v2beta1_create_artifact_task_request.py
@@ -0,0 +1,61 @@
+# coding: utf-8
+
+"""
+    Kubeflow Pipelines API
+
+    This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition.
+
+    Contact: kubeflow-pipelines@google.com
+    Generated by: https://openapi-generator.tech
+"""
+
+
+from __future__ import absolute_import
+
+import unittest
+import datetime
+
+import kfp_server_api
+from kfp_server_api.models.v2beta1_create_artifact_task_request import V2beta1CreateArtifactTaskRequest  # noqa: E501
+from kfp_server_api.rest import ApiException
+
+class TestV2beta1CreateArtifactTaskRequest(unittest.TestCase):
+    """V2beta1CreateArtifactTaskRequest unit test stubs"""
+
+    def setUp(self):
+        pass
+
+    def tearDown(self):
+        pass
+
+    def make_instance(self, include_optional):
+        """Test V2beta1CreateArtifactTaskRequest
+            include_option is a boolean, when False only required
+            params are included, when True both required and
+            optional params are included """
+        # model = kfp_server_api.models.v2beta1_create_artifact_task_request.V2beta1CreateArtifactTaskRequest()  # noqa: E501
+        if include_optional :
+            return V2beta1CreateArtifactTaskRequest(
+                artifact_task = kfp_server_api.models.describes_a_relationship_link_between_artifacts_and_tasks.Describes a relationship link between Artifacts and Tasks(
+                    id = '0',
+                    artifact_id = '0',
+                    run_id = '0',
+                    task_id = '0',
+                    type = 'UNSPECIFIED',
+                    producer = kfp_server_api.models.v2beta1_io_producer.v2beta1IOProducer(
+                        task_name = '0',
+                        iteration = '0', ),
+                    key = '0', )
+            )
+        else :
+            return V2beta1CreateArtifactTaskRequest(
+        )
+
+    def testV2beta1CreateArtifactTaskRequest(self):
+        """Test V2beta1CreateArtifactTaskRequest"""
+        inst_req_only = self.make_instance(include_optional=False)
+        inst_req_and_optional = self.make_instance(include_optional=True)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/backend/api/v2beta1/python_http_client/test/test_v2beta1_create_artifact_tasks_bulk_request.py b/backend/api/v2beta1/python_http_client/test/test_v2beta1_create_artifact_tasks_bulk_request.py
new file mode 100644
index 00000000000..1258a6f3359
--- /dev/null
+++ b/backend/api/v2beta1/python_http_client/test/test_v2beta1_create_artifact_tasks_bulk_request.py
@@ -0,0 +1,63 @@
+# coding: utf-8
+
+"""
+    Kubeflow Pipelines API
+
+    This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition.
+
+    Contact: kubeflow-pipelines@google.com
+    Generated by: https://openapi-generator.tech
+"""
+
+
+from __future__ import absolute_import
+
+import unittest
+import datetime
+
+import kfp_server_api
+from kfp_server_api.models.v2beta1_create_artifact_tasks_bulk_request import V2beta1CreateArtifactTasksBulkRequest  # noqa: E501
+from kfp_server_api.rest import ApiException
+
+class TestV2beta1CreateArtifactTasksBulkRequest(unittest.TestCase):
+    """V2beta1CreateArtifactTasksBulkRequest unit test stubs"""
+
+    def setUp(self):
+        pass
+
+    def tearDown(self):
+        pass
+
+    def make_instance(self, include_optional):
+        """Test V2beta1CreateArtifactTasksBulkRequest
+            include_option is a boolean, when False only required
+            params are included, when True both required and
+            optional params are included """
+        # model = kfp_server_api.models.v2beta1_create_artifact_tasks_bulk_request.V2beta1CreateArtifactTasksBulkRequest()  # noqa: E501
+        if include_optional :
+            return V2beta1CreateArtifactTasksBulkRequest(
+                artifact_tasks = [
+                    kfp_server_api.models.describes_a_relationship_link_between_artifacts_and_tasks.Describes a relationship link between Artifacts and Tasks(
+                        id = '0',
+                        artifact_id = '0',
+                        run_id = '0',
+                        task_id = '0',
+                        type = 'UNSPECIFIED',
+                        producer = kfp_server_api.models.v2beta1_io_producer.v2beta1IOProducer(
+                            task_name = '0',
+                            iteration = '0', ),
+                        key = '0', )
+                    ]
+            )
+        else :
+            return V2beta1CreateArtifactTasksBulkRequest(
+        )
+
+    def testV2beta1CreateArtifactTasksBulkRequest(self):
+        """Test V2beta1CreateArtifactTasksBulkRequest"""
+        inst_req_only = self.make_instance(include_optional=False)
+        inst_req_and_optional = self.make_instance(include_optional=True)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/backend/api/v2beta1/python_http_client/test/test_v2beta1_create_artifact_tasks_bulk_response.py b/backend/api/v2beta1/python_http_client/test/test_v2beta1_create_artifact_tasks_bulk_response.py
new file mode 100644
index 00000000000..153bc5ba3d8
--- /dev/null
+++ b/backend/api/v2beta1/python_http_client/test/test_v2beta1_create_artifact_tasks_bulk_response.py
@@ -0,0 +1,63 @@
+# coding: utf-8
+
+"""
+    Kubeflow Pipelines API
+
+    This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition.
+
+    Contact: kubeflow-pipelines@google.com
+    Generated by: https://openapi-generator.tech
+"""
+
+
+from __future__ import absolute_import
+
+import unittest
+import datetime
+
+import kfp_server_api
+from kfp_server_api.models.v2beta1_create_artifact_tasks_bulk_response import V2beta1CreateArtifactTasksBulkResponse  # noqa: E501
+from kfp_server_api.rest import ApiException
+
+class TestV2beta1CreateArtifactTasksBulkResponse(unittest.TestCase):
+    """V2beta1CreateArtifactTasksBulkResponse unit test stubs"""
+
+    def setUp(self):
+        pass
+
+    def tearDown(self):
+        pass
+
+    def make_instance(self, include_optional):
+        """Test V2beta1CreateArtifactTasksBulkResponse
+            include_option is a boolean, when False only required
+            params are included, when True both required and
+            optional params are included """
+        # model = kfp_server_api.models.v2beta1_create_artifact_tasks_bulk_response.V2beta1CreateArtifactTasksBulkResponse()  # noqa: E501
+        if include_optional :
+            return V2beta1CreateArtifactTasksBulkResponse(
+                artifact_tasks = [
+                    kfp_server_api.models.describes_a_relationship_link_between_artifacts_and_tasks.Describes a relationship link between Artifacts and Tasks(
+                        id = '0',
+                        artifact_id = '0',
+                        run_id = '0',
+                        task_id = '0',
+                        type = 'UNSPECIFIED',
+                        producer = kfp_server_api.models.v2beta1_io_producer.v2beta1IOProducer(
+                            task_name = '0',
+                            iteration = '0', ),
+                        key = '0', )
+                    ]
+            )
+        else :
+            return V2beta1CreateArtifactTasksBulkResponse(
+        )
+
+    def testV2beta1CreateArtifactTasksBulkResponse(self):
+        """Test V2beta1CreateArtifactTasksBulkResponse"""
+        inst_req_only = self.make_instance(include_optional=False)
+        inst_req_and_optional = self.make_instance(include_optional=True)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/backend/api/v2beta1/python_http_client/test/test_v2beta1_create_artifacts_bulk_request.py b/backend/api/v2beta1/python_http_client/test/test_v2beta1_create_artifacts_bulk_request.py
new file mode 100644
index 00000000000..9ea15943b5f
--- /dev/null
+++ b/backend/api/v2beta1/python_http_client/test/test_v2beta1_create_artifacts_bulk_request.py
@@ -0,0 +1,71 @@
+# coding: utf-8
+
+"""
+    Kubeflow Pipelines API
+
+    This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition.
+
+    Contact: kubeflow-pipelines@google.com
+    Generated by: https://openapi-generator.tech
+"""
+
+
+from __future__ import absolute_import
+
+import unittest
+import datetime
+
+import kfp_server_api
+from kfp_server_api.models.v2beta1_create_artifacts_bulk_request import V2beta1CreateArtifactsBulkRequest  # noqa: E501
+from kfp_server_api.rest import ApiException
+
+class TestV2beta1CreateArtifactsBulkRequest(unittest.TestCase):
+    """V2beta1CreateArtifactsBulkRequest unit test stubs"""
+
+    def setUp(self):
+        pass
+
+    def tearDown(self):
+        pass
+
+    def make_instance(self, include_optional):
+        """Test V2beta1CreateArtifactsBulkRequest
+            include_option is a boolean, when False only required
+            params are included, when True both required and
+            optional params are included """
+        # model = kfp_server_api.models.v2beta1_create_artifacts_bulk_request.V2beta1CreateArtifactsBulkRequest()  # noqa: E501
+        if include_optional :
+            return V2beta1CreateArtifactsBulkRequest(
+                artifacts = [
+                    kfp_server_api.models.v2beta1_create_artifact_request.v2beta1CreateArtifactRequest(
+                        artifact = kfp_server_api.models.not_to_be_confused_with_runtime_artifact_in_pipeline_spec.Not to be confused with RuntimeArtifact in PipelineSpec(
+                            artifact_id = '0',
+                            name = '0',
+                            description = '0',
+                            type = 'TYPE_UNSPECIFIED',
+                            uri = '0',
+                            metadata = {
+                                'key' : None
+                                },
+                            number_value = 1.337,
+                            created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
+                            namespace = '0', ),
+                        run_id = '0',
+                        task_id = '0',
+                        producer_key = '0',
+                        iteration_index = '0',
+                        type = 'UNSPECIFIED', )
+                    ]
+            )
+        else :
+            return V2beta1CreateArtifactsBulkRequest(
+        )
+
+    def testV2beta1CreateArtifactsBulkRequest(self):
+        """Test V2beta1CreateArtifactsBulkRequest"""
+        inst_req_only = self.make_instance(include_optional=False)
+        inst_req_and_optional = self.make_instance(include_optional=True)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/backend/api/v2beta1/python_http_client/test/test_v2beta1_create_artifacts_bulk_response.py b/backend/api/v2beta1/python_http_client/test/test_v2beta1_create_artifacts_bulk_response.py
new file mode 100644
index 00000000000..69e381554d1
--- /dev/null
+++ b/backend/api/v2beta1/python_http_client/test/test_v2beta1_create_artifacts_bulk_response.py
@@ -0,0 +1,65 @@
+# coding: utf-8
+
+"""
+    Kubeflow Pipelines API
+
+    This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition.
+
+    Contact: kubeflow-pipelines@google.com
+    Generated by: https://openapi-generator.tech
+"""
+
+
+from __future__ import absolute_import
+
+import unittest
+import datetime
+
+import kfp_server_api
+from kfp_server_api.models.v2beta1_create_artifacts_bulk_response import V2beta1CreateArtifactsBulkResponse  # noqa: E501
+from kfp_server_api.rest import ApiException
+
+class TestV2beta1CreateArtifactsBulkResponse(unittest.TestCase):
+    """V2beta1CreateArtifactsBulkResponse unit test stubs"""
+
+    def setUp(self):
+        pass
+
+    def tearDown(self):
+        pass
+
+    def make_instance(self, include_optional):
+        """Test V2beta1CreateArtifactsBulkResponse
+            include_option is a boolean, when False only required
+            params are included, when True both required and
+            optional params are included """
+        # model = kfp_server_api.models.v2beta1_create_artifacts_bulk_response.V2beta1CreateArtifactsBulkResponse()  # noqa: E501
+        if include_optional :
+            return V2beta1CreateArtifactsBulkResponse(
+                artifacts = [
+                    kfp_server_api.models.not_to_be_confused_with_runtime_artifact_in_pipeline_spec.Not to be confused with RuntimeArtifact in PipelineSpec(
+                        artifact_id = '0',
+                        name = '0',
+                        description = '0',
+                        type = 'TYPE_UNSPECIFIED',
+                        uri = '0',
+                        metadata = {
+                            'key' : None
+                            },
+                        number_value = 1.337,
+                        created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
+                        namespace = '0', )
+                    ]
+            )
+        else :
+            return V2beta1CreateArtifactsBulkResponse(
+        )
+
+    def testV2beta1CreateArtifactsBulkResponse(self):
+        """Test V2beta1CreateArtifactsBulkResponse"""
+        inst_req_only = self.make_instance(include_optional=False)
+        inst_req_and_optional = self.make_instance(include_optional=True)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/backend/api/v2beta1/python_http_client/test/test_v2beta1_get_run_request_view_mode.py b/backend/api/v2beta1/python_http_client/test/test_v2beta1_get_run_request_view_mode.py
new file mode 100644
index 00000000000..0262d5bbc26
--- /dev/null
+++ b/backend/api/v2beta1/python_http_client/test/test_v2beta1_get_run_request_view_mode.py
@@ -0,0 +1,51 @@
+# coding: utf-8
+
+"""
+    Kubeflow Pipelines API
+
+    This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition.
+
+    Contact: kubeflow-pipelines@google.com
+    Generated by: https://openapi-generator.tech
+"""
+
+
+from __future__ import absolute_import
+
+import unittest
+import datetime
+
+import kfp_server_api
+from kfp_server_api.models.v2beta1_get_run_request_view_mode import V2beta1GetRunRequestViewMode  # noqa: E501
+from kfp_server_api.rest import ApiException
+
+class TestV2beta1GetRunRequestViewMode(unittest.TestCase):
+    """V2beta1GetRunRequestViewMode unit test stubs"""
+
+    def setUp(self):
+        pass
+
+    def tearDown(self):
+        pass
+
+    def make_instance(self, include_optional):
+        """Test V2beta1GetRunRequestViewMode
+            include_option is a boolean, when False only required
+            params are included, when True both required and
+            optional params are included """
+        # model = kfp_server_api.models.v2beta1_get_run_request_view_mode.V2beta1GetRunRequestViewMode()  # noqa: E501
+        if include_optional :
+            return V2beta1GetRunRequestViewMode(
+            )
+        else :
+            return V2beta1GetRunRequestViewMode(
+        )
+
+    def testV2beta1GetRunRequestViewMode(self):
+        """Test V2beta1GetRunRequestViewMode"""
+        inst_req_only = self.make_instance(include_optional=False)
+        inst_req_and_optional = self.make_instance(include_optional=True)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/backend/api/v2beta1/python_http_client/test/test_v2beta1_io_producer.py b/backend/api/v2beta1/python_http_client/test/test_v2beta1_io_producer.py
new file mode 100644
index 00000000000..d08caca25d9
--- /dev/null
+++ b/backend/api/v2beta1/python_http_client/test/test_v2beta1_io_producer.py
@@ -0,0 +1,53 @@
+# coding: utf-8
+
+"""
+    Kubeflow Pipelines API
+
+    This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition.
+
+    Contact: kubeflow-pipelines@google.com
+    Generated by: https://openapi-generator.tech
+"""
+
+
+from __future__ import absolute_import
+
+import unittest
+import datetime
+
+import kfp_server_api
+from kfp_server_api.models.v2beta1_io_producer import V2beta1IOProducer  # noqa: E501
+from kfp_server_api.rest import ApiException
+
+class TestV2beta1IOProducer(unittest.TestCase):
+    """V2beta1IOProducer unit test stubs"""
+
+    def setUp(self):
+        pass
+
+    def tearDown(self):
+        pass
+
+    def make_instance(self, include_optional):
+        """Test V2beta1IOProducer
+            include_option is a boolean, when False only required
+            params are included, when True both required and
+            optional params are included """
+        # model = kfp_server_api.models.v2beta1_io_producer.V2beta1IOProducer()  # noqa: E501
+        if include_optional :
+            return V2beta1IOProducer(
+                task_name = '0',
+                iteration = '0'
+            )
+        else :
+            return V2beta1IOProducer(
+        )
+
+    def testV2beta1IOProducer(self):
+        """Test V2beta1IOProducer"""
+        inst_req_only = self.make_instance(include_optional=False)
+        inst_req_and_optional = self.make_instance(include_optional=True)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/backend/api/v2beta1/python_http_client/test/test_v2beta1_io_type.py b/backend/api/v2beta1/python_http_client/test/test_v2beta1_io_type.py
new file mode 100644
index 00000000000..9e52558c421
--- /dev/null
+++ b/backend/api/v2beta1/python_http_client/test/test_v2beta1_io_type.py
@@ -0,0 +1,51 @@
+# coding: utf-8
+
+"""
+    Kubeflow Pipelines API
+
+    This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition.
+
+    Contact: kubeflow-pipelines@google.com
+    Generated by: https://openapi-generator.tech
+"""
+
+
+from __future__ import absolute_import
+
+import unittest
+import datetime
+
+import kfp_server_api
+from kfp_server_api.models.v2beta1_io_type import V2beta1IOType  # noqa: E501
+from kfp_server_api.rest import ApiException
+
+class TestV2beta1IOType(unittest.TestCase):
+    """V2beta1IOType unit test stubs"""
+
+    def setUp(self):
+        pass
+
+    def tearDown(self):
+        pass
+
+    def make_instance(self, include_optional):
+        """Test V2beta1IOType
+            include_option is a boolean, when False only required
+            params are included, when True both required and
+            optional params are included """
+        # model = kfp_server_api.models.v2beta1_io_type.V2beta1IOType()  # noqa: E501
+        if include_optional :
+            return V2beta1IOType(
+            )
+        else :
+            return V2beta1IOType(
+        )
+
+    def testV2beta1IOType(self):
+        """Test V2beta1IOType"""
+        inst_req_only = self.make_instance(include_optional=False)
+        inst_req_and_optional = self.make_instance(include_optional=True)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/backend/api/v2beta1/python_http_client/test/test_v2beta1_list_artifact_response.py b/backend/api/v2beta1/python_http_client/test/test_v2beta1_list_artifact_response.py
new file mode 100644
index 00000000000..e06152c6760
--- /dev/null
+++ b/backend/api/v2beta1/python_http_client/test/test_v2beta1_list_artifact_response.py
@@ -0,0 +1,67 @@
+# coding: utf-8
+
+"""
+    Kubeflow Pipelines API
+
+    This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition.
+
+    Contact: kubeflow-pipelines@google.com
+    Generated by: https://openapi-generator.tech
+"""
+
+
+from __future__ import absolute_import
+
+import unittest
+import datetime
+
+import kfp_server_api
+from kfp_server_api.models.v2beta1_list_artifact_response import V2beta1ListArtifactResponse  # noqa: E501
+from kfp_server_api.rest import ApiException
+
+class TestV2beta1ListArtifactResponse(unittest.TestCase):
+    """V2beta1ListArtifactResponse unit test stubs"""
+
+    def setUp(self):
+        pass
+
+    def tearDown(self):
+        pass
+
+    def make_instance(self, include_optional):
+        """Test V2beta1ListArtifactResponse
+            include_option is a boolean, when False only required
+            params are included, when True both required and
+            optional params are included """
+        # model = kfp_server_api.models.v2beta1_list_artifact_response.V2beta1ListArtifactResponse()  # noqa: E501
+        if include_optional :
+            return V2beta1ListArtifactResponse(
+                artifacts = [
+                    kfp_server_api.models.not_to_be_confused_with_runtime_artifact_in_pipeline_spec.Not to be confused with RuntimeArtifact in PipelineSpec(
+                        artifact_id = '0',
+                        name = '0',
+                        description = '0',
+                        type = 'TYPE_UNSPECIFIED',
+                        uri = '0',
+                        metadata = {
+                            'key' : None
+                            },
+                        number_value = 1.337,
+                        created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
+                        namespace = '0', )
+                    ],
+                total_size = 56,
+                next_page_token = '0'
+            )
+        else :
+            return V2beta1ListArtifactResponse(
+        )
+
+    def testV2beta1ListArtifactResponse(self):
+        """Test V2beta1ListArtifactResponse"""
+        inst_req_only = self.make_instance(include_optional=False)
+        inst_req_and_optional = self.make_instance(include_optional=True)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/backend/api/v2beta1/python_http_client/test/test_v2beta1_list_artifact_tasks_response.py b/backend/api/v2beta1/python_http_client/test/test_v2beta1_list_artifact_tasks_response.py
new file mode 100644
index 00000000000..6145221df1e
--- /dev/null
+++ b/backend/api/v2beta1/python_http_client/test/test_v2beta1_list_artifact_tasks_response.py
@@ -0,0 +1,65 @@
+# coding: utf-8
+
+"""
+    Kubeflow Pipelines API
+
+    This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition.
+
+    Contact: kubeflow-pipelines@google.com
+    Generated by: https://openapi-generator.tech
+"""
+
+
+from __future__ import absolute_import
+
+import unittest
+import datetime
+
+import kfp_server_api
+from kfp_server_api.models.v2beta1_list_artifact_tasks_response import V2beta1ListArtifactTasksResponse  # noqa: E501
+from kfp_server_api.rest import ApiException
+
+class TestV2beta1ListArtifactTasksResponse(unittest.TestCase):
+    """V2beta1ListArtifactTasksResponse unit test stubs"""
+
+    def setUp(self):
+        pass
+
+    def tearDown(self):
+        pass
+
+    def make_instance(self, include_optional):
+        """Test V2beta1ListArtifactTasksResponse
+            include_option is a boolean, when False only required
+            params are included, when True both required and
+            optional params are included """
+        # model = kfp_server_api.models.v2beta1_list_artifact_tasks_response.V2beta1ListArtifactTasksResponse()  # noqa: E501
+        if include_optional :
+            return V2beta1ListArtifactTasksResponse(
+                artifact_tasks = [
+                    kfp_server_api.models.describes_a_relationship_link_between_artifacts_and_tasks.Describes a relationship link between Artifacts and Tasks(
+                        id = '0',
+                        artifact_id = '0',
+                        run_id = '0',
+                        task_id = '0',
+                        type = 'UNSPECIFIED',
+                        producer = kfp_server_api.models.v2beta1_io_producer.v2beta1IOProducer(
+                            task_name = '0',
+                            iteration = '0', ),
+                        key = '0', )
+                    ],
+                total_size = 56,
+                next_page_token = '0'
+            )
+        else :
+            return V2beta1ListArtifactTasksResponse(
+        )
+
+    def testV2beta1ListArtifactTasksResponse(self):
+        """Test V2beta1ListArtifactTasksResponse"""
+        inst_req_only = self.make_instance(include_optional=False)
+        inst_req_and_optional = self.make_instance(include_optional=True)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/backend/api/v2beta1/python_http_client/test/test_v2beta1_list_runs_request_view_mode.py b/backend/api/v2beta1/python_http_client/test/test_v2beta1_list_runs_request_view_mode.py
new file mode 100644
index 00000000000..7bc97b3f0ae
--- /dev/null
+++ b/backend/api/v2beta1/python_http_client/test/test_v2beta1_list_runs_request_view_mode.py
@@ -0,0 +1,51 @@
+# coding: utf-8
+
+"""
+    Kubeflow Pipelines API
+
+    This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition.
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.v2beta1_list_runs_request_view_mode import V2beta1ListRunsRequestViewMode # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestV2beta1ListRunsRequestViewMode(unittest.TestCase): + """V2beta1ListRunsRequestViewMode unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test V2beta1ListRunsRequestViewMode + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.v2beta1_list_runs_request_view_mode.V2beta1ListRunsRequestViewMode() # noqa: E501 + if include_optional : + return V2beta1ListRunsRequestViewMode( + ) + else : + return V2beta1ListRunsRequestViewMode( + ) + + def testV2beta1ListRunsRequestViewMode(self): + """Test V2beta1ListRunsRequestViewMode""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/v2beta1/python_http_client/test/test_v2beta1_list_runs_response.py b/backend/api/v2beta1/python_http_client/test/test_v2beta1_list_runs_response.py index 588296c5ce9..a96e4b47775 100644 --- a/backend/api/v2beta1/python_http_client/test/test_v2beta1_list_runs_response.py +++ b/backend/api/v2beta1/python_http_client/test/test_v2beta1_list_runs_response.py @@ -71,47 +71,72 @@ def make_instance(self, include_optional): pipeline_run_context_id = '0', task_details = [ kfp_server_api.models.v2beta1_pipeline_task_detail.v2beta1PipelineTaskDetail( - run_id = '0', - task_id = '0', + name = '0', display_name = '0', + task_id = '0', + run_id = '0', + pods = [ + kfp_server_api.models.pipeline_task_detail_task_pod.PipelineTaskDetailTaskPod( + name = '0', + uid = '0', + type = 'UNSPECIFIED', ) + ], + cache_fingerprint = '0', create_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), start_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), end_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), - executor_detail = kfp_server_api.models.v2beta1_pipeline_task_executor_detail.v2beta1PipelineTaskExecutorDetail( - main_job = '0', - pre_caching_check_job = '0', - failed_main_jobs = [ - '0' - ], - failed_pre_caching_check_jobs = [ - '0' - ], ), - execution_id = '0', - inputs = { - 'key' : kfp_server_api.models.v2beta1_artifact_list.v2beta1ArtifactList( - artifact_ids = [ - '0' - ], ) - }, - outputs = { - 'key' : kfp_server_api.models.v2beta1_artifact_list.v2beta1ArtifactList() - }, - parent_task_id = '0', + status_metadata = kfp_server_api.models.pipeline_task_detail_status_metadata.PipelineTaskDetailStatusMetadata( + message = '0', + custom_properties = { + 'key' : None + }, ), state_history = [ - kfp_server_api.models.v2beta1_runtime_status.v2beta1RuntimeStatus( + kfp_server_api.models.pipeline_task_detail_task_status.PipelineTaskDetailTaskStatus( update_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), ) ], - pod_name = '0', + type = 'ROOT', + type_attributes = kfp_server_api.models.pipeline_task_detail_type_attributes.PipelineTaskDetailTypeAttributes( + iteration_index = '0', + iteration_count = 
'0', ), + parent_task_id = '0', child_tasks = [ kfp_server_api.models.pipeline_task_detail_child_task.PipelineTaskDetailChildTask( task_id = '0', - pod_name = '0', ) + name = '0', ) + ], + inputs = kfp_server_api.models.pipeline_task_detail_input_outputs.PipelineTaskDetailInputOutputs( + artifacts = [ + kfp_server_api.models.align_structure_with_executor_input.Align structure with Executor Input( + artifact_key = '0', + producer = kfp_server_api.models.v2beta1_io_producer.v2beta1IOProducer( + task_name = '0', + iteration = '0', ), ) + ], ), + outputs = kfp_server_api.models.pipeline_task_detail_input_outputs.PipelineTaskDetailInputOutputs(), + scope_path = [ + '0' ], ) ], ), recurring_run_id = '0', state_history = [ kfp_server_api.models.v2beta1_runtime_status.v2beta1RuntimeStatus( update_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), ) + ], + pipeline_reference = kfp_server_api.models.v2beta1_pipeline_version_reference.v2beta1PipelineVersionReference( + pipeline_id = '0', + pipeline_version_id = '0', ), + task_count = 56, + tasks = [ + kfp_server_api.models.v2beta1_pipeline_task_detail.v2beta1PipelineTaskDetail( + name = '0', + display_name = '0', + task_id = '0', + run_id = '0', + cache_fingerprint = '0', + create_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + start_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + end_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + parent_task_id = '0', ) ], ) ], total_size = 56, diff --git a/backend/api/v2beta1/python_http_client/test/test_v2beta1_list_tasks_response.py b/backend/api/v2beta1/python_http_client/test/test_v2beta1_list_tasks_response.py new file mode 100644 index 00000000000..9cd65ff47e4 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/test/test_v2beta1_list_tasks_response.py @@ -0,0 +1,123 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. 
+ + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.v2beta1_list_tasks_response import V2beta1ListTasksResponse # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestV2beta1ListTasksResponse(unittest.TestCase): + """V2beta1ListTasksResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test V2beta1ListTasksResponse + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.v2beta1_list_tasks_response.V2beta1ListTasksResponse() # noqa: E501 + if include_optional : + return V2beta1ListTasksResponse( + tasks = [ + kfp_server_api.models.v2beta1_pipeline_task_detail.v2beta1PipelineTaskDetail( + name = '0', + display_name = '0', + task_id = '0', + run_id = '0', + pods = [ + kfp_server_api.models.pipeline_task_detail_task_pod.PipelineTaskDetailTaskPod( + name = '0', + uid = '0', + type = 'UNSPECIFIED', ) + ], + cache_fingerprint = '0', + create_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + start_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + end_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + state = 'RUNTIME_STATE_UNSPECIFIED', + status_metadata = kfp_server_api.models.pipeline_task_detail_status_metadata.PipelineTaskDetailStatusMetadata( + message = '0', + custom_properties = { + 'key' : None + }, ), + state_history = [ + kfp_server_api.models.pipeline_task_detail_task_status.PipelineTaskDetailTaskStatus( + update_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + error = kfp_server_api.models.googlerpc_status.googlerpcStatus( + code = 56, + message = '0', + details = [ + { + 'key' : None + } + ], ), ) + ], + type = 'ROOT', + type_attributes = kfp_server_api.models.pipeline_task_detail_type_attributes.PipelineTaskDetailTypeAttributes( + iteration_index = '0', + iteration_count = '0', ), + error = kfp_server_api.models.googlerpc_status.googlerpcStatus( + code = 56, + message = '0', ), + parent_task_id = '0', + child_tasks = [ + kfp_server_api.models.pipeline_task_detail_child_task.PipelineTaskDetailChildTask( + task_id = '0', + name = '0', ) + ], + inputs = kfp_server_api.models.pipeline_task_detail_input_outputs.PipelineTaskDetailInputOutputs( + parameters = [ + kfp_server_api.models.input_outputs_io_parameter.InputOutputsIOParameter( + value = kfp_server_api.models.value.value(), + parameter_key = '0', + producer = kfp_server_api.models.v2beta1_io_producer.v2beta1IOProducer( + task_name = '0', + iteration = '0', ), ) + ], + artifacts = [ + kfp_server_api.models.align_structure_with_executor_input.Align structure with Executor Input( + artifact_key = '0', ) + ], ), + outputs = kfp_server_api.models.pipeline_task_detail_input_outputs.PipelineTaskDetailInputOutputs( + parameters = [ + kfp_server_api.models.input_outputs_io_parameter.InputOutputsIOParameter( + value = kfp_server_api.models.value.value(), + parameter_key = '0', ) + ], ), + scope_path = [ + '0' + ], ) + ], + next_page_token = '0', + total_size = 56 + ) + else : + return V2beta1ListTasksResponse( + ) + + def testV2beta1ListTasksResponse(self): + """Test V2beta1ListTasksResponse""" + 
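# A minimal sketch, assuming the standard to_dict() helper that + # openapi-generator emits on every model; round-tripping the instance + # would give this stub a real assertion: + # inst = self.make_instance(include_optional=True) + # assert isinstance(inst.to_dict(), dict) +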
inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/v2beta1/python_http_client/test/test_v2beta1_pipeline_task_detail.py b/backend/api/v2beta1/python_http_client/test/test_v2beta1_pipeline_task_detail.py index a1fe3ce24cf..8c6e2830747 100644 --- a/backend/api/v2beta1/python_http_client/test/test_v2beta1_pipeline_task_detail.py +++ b/backend/api/v2beta1/python_http_client/test/test_v2beta1_pipeline_task_detail.py @@ -36,46 +36,28 @@ def make_instance(self, include_optional): # model = kfp_server_api.models.v2beta1_pipeline_task_detail.V2beta1PipelineTaskDetail() # noqa: E501 if include_optional : return V2beta1PipelineTaskDetail( - run_id = '0', - task_id = '0', + name = '0', display_name = '0', + task_id = '0', + run_id = '0', + pods = [ + kfp_server_api.models.pipeline_task_detail_task_pod.PipelineTaskDetailTaskPod( + name = '0', + uid = '0', + type = 'UNSPECIFIED', ) + ], + cache_fingerprint = '0', create_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), start_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), end_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), - executor_detail = kfp_server_api.models.v2beta1_pipeline_task_executor_detail.v2beta1PipelineTaskExecutorDetail( - main_job = '0', - pre_caching_check_job = '0', - failed_main_jobs = [ - '0' - ], - failed_pre_caching_check_jobs = [ - '0' - ], ), state = 'RUNTIME_STATE_UNSPECIFIED', - execution_id = '0', - error = kfp_server_api.models.googlerpc_status.googlerpcStatus( - code = 56, + status_metadata = kfp_server_api.models.pipeline_task_detail_status_metadata.PipelineTaskDetailStatusMetadata( message = '0', - details = [ - { - 'key' : None - } - ], ), - inputs = { - 'key' : kfp_server_api.models.v2beta1_artifact_list.v2beta1ArtifactList( - artifact_ids = [ - '0' - ], ) - }, - outputs = { - 'key' : kfp_server_api.models.v2beta1_artifact_list.v2beta1ArtifactList( - artifact_ids = [ - '0' - ], ) - }, - parent_task_id = '0', + custom_properties = { + 'key' : None + }, ), state_history = [ - kfp_server_api.models.v2beta1_runtime_status.v2beta1RuntimeStatus( + kfp_server_api.models.pipeline_task_detail_task_status.PipelineTaskDetailTaskStatus( update_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), state = 'RUNTIME_STATE_UNSPECIFIED', error = kfp_server_api.models.googlerpc_status.googlerpcStatus( @@ -87,11 +69,54 @@ def make_instance(self, include_optional): } ], ), ) ], - pod_name = '0', + type = 'ROOT', + type_attributes = kfp_server_api.models.pipeline_task_detail_type_attributes.PipelineTaskDetailTypeAttributes( + iteration_index = '0', + iteration_count = '0', ), + error = kfp_server_api.models.googlerpc_status.googlerpcStatus( + code = 56, + message = '0', + details = [ + { + 'key' : None + } + ], ), + parent_task_id = '0', child_tasks = [ kfp_server_api.models.pipeline_task_detail_child_task.PipelineTaskDetailChildTask( task_id = '0', - pod_name = '0', ) + name = '0', ) + ], + inputs = kfp_server_api.models.pipeline_task_detail_input_outputs.PipelineTaskDetailInputOutputs( + parameters = [ + kfp_server_api.models.input_outputs_io_parameter.InputOutputsIOParameter( + value = kfp_server_api.models.value.value(), + type = 'UNSPECIFIED', + parameter_key = '0', + producer = kfp_server_api.models.v2beta1_io_producer.v2beta1IOProducer( + task_name 
= '0', + iteration = '0', ), ) + ], + artifacts = [ + kfp_server_api.models.align_structure_with_executor_input.Align structure with Executor Input( + artifact_key = '0', ) + ], ), + outputs = kfp_server_api.models.pipeline_task_detail_input_outputs.PipelineTaskDetailInputOutputs( + parameters = [ + kfp_server_api.models.input_outputs_io_parameter.InputOutputsIOParameter( + value = kfp_server_api.models.value.value(), + type = 'UNSPECIFIED', + parameter_key = '0', + producer = kfp_server_api.models.v2beta1_io_producer.v2beta1IOProducer( + task_name = '0', + iteration = '0', ), ) + ], + artifacts = [ + kfp_server_api.models.align_structure_with_executor_input.Align structure with Executor Input( + artifact_key = '0', ) + ], ), + scope_path = [ + '0' ] ) else : diff --git a/backend/api/v2beta1/python_http_client/test/test_v2beta1_run.py b/backend/api/v2beta1/python_http_client/test/test_v2beta1_run.py index bcdd39a53d1..7568dbfb22d 100644 --- a/backend/api/v2beta1/python_http_client/test/test_v2beta1_run.py +++ b/backend/api/v2beta1/python_http_client/test/test_v2beta1_run.py @@ -69,50 +69,72 @@ def make_instance(self, include_optional): pipeline_run_context_id = '0', task_details = [ kfp_server_api.models.v2beta1_pipeline_task_detail.v2beta1PipelineTaskDetail( - run_id = '0', - task_id = '0', + name = '0', display_name = '0', + task_id = '0', + run_id = '0', + pods = [ + kfp_server_api.models.pipeline_task_detail_task_pod.PipelineTaskDetailTaskPod( + name = '0', + uid = '0', + type = 'UNSPECIFIED', ) + ], + cache_fingerprint = '0', create_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), start_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), end_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), - executor_detail = kfp_server_api.models.v2beta1_pipeline_task_executor_detail.v2beta1PipelineTaskExecutorDetail( - main_job = '0', - pre_caching_check_job = '0', - failed_main_jobs = [ - '0' - ], - failed_pre_caching_check_jobs = [ - '0' - ], ), state = 'RUNTIME_STATE_UNSPECIFIED', - execution_id = '0', - error = kfp_server_api.models.googlerpc_status.googlerpcStatus( - code = 56, + status_metadata = kfp_server_api.models.pipeline_task_detail_status_metadata.PipelineTaskDetailStatusMetadata( message = '0', - details = [ - { - 'key' : None - } - ], ), - inputs = { - 'key' : kfp_server_api.models.v2beta1_artifact_list.v2beta1ArtifactList( - artifact_ids = [ - '0' - ], ) - }, - outputs = { - 'key' : kfp_server_api.models.v2beta1_artifact_list.v2beta1ArtifactList() - }, - parent_task_id = '0', + custom_properties = { + 'key' : None + }, ), state_history = [ - kfp_server_api.models.v2beta1_runtime_status.v2beta1RuntimeStatus( - update_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), ) + kfp_server_api.models.pipeline_task_detail_task_status.PipelineTaskDetailTaskStatus( + update_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + error = kfp_server_api.models.googlerpc_status.googlerpcStatus( + code = 56, + message = '0', + details = [ + { + 'key' : None + } + ], ), ) ], - pod_name = '0', + type = 'ROOT', + type_attributes = kfp_server_api.models.pipeline_task_detail_type_attributes.PipelineTaskDetailTypeAttributes( + iteration_index = '0', + iteration_count = '0', ), + error = kfp_server_api.models.googlerpc_status.googlerpcStatus( + code = 56, + message = '0', ), + parent_task_id = '0', child_tasks = [ 
kfp_server_api.models.pipeline_task_detail_child_task.PipelineTaskDetailChildTask( task_id = '0', - pod_name = '0', ) + name = '0', ) + ], + inputs = kfp_server_api.models.pipeline_task_detail_input_outputs.PipelineTaskDetailInputOutputs( + parameters = [ + kfp_server_api.models.input_outputs_io_parameter.InputOutputsIOParameter( + value = kfp_server_api.models.value.value(), + parameter_key = '0', + producer = kfp_server_api.models.v2beta1_io_producer.v2beta1IOProducer( + task_name = '0', + iteration = '0', ), ) + ], + artifacts = [ + kfp_server_api.models.align_structure_with_executor_input.Align structure with Executor Input( + artifact_key = '0', ) + ], ), + outputs = kfp_server_api.models.pipeline_task_detail_input_outputs.PipelineTaskDetailInputOutputs( + parameters = [ + kfp_server_api.models.input_outputs_io_parameter.InputOutputsIOParameter( + value = kfp_server_api.models.value.value(), + parameter_key = '0', ) + ], ), + scope_path = [ + '0' ], ) ], ), recurring_run_id = '0', @@ -128,6 +150,80 @@ def make_instance(self, include_optional): 'key' : None } ], ), ) + ], + pipeline_reference = kfp_server_api.models.v2beta1_pipeline_version_reference.v2beta1PipelineVersionReference( + pipeline_id = '0', + pipeline_version_id = '0', ), + task_count = 56, + tasks = [ + kfp_server_api.models.v2beta1_pipeline_task_detail.v2beta1PipelineTaskDetail( + name = '0', + display_name = '0', + task_id = '0', + run_id = '0', + pods = [ + kfp_server_api.models.pipeline_task_detail_task_pod.PipelineTaskDetailTaskPod( + name = '0', + uid = '0', + type = 'UNSPECIFIED', ) + ], + cache_fingerprint = '0', + create_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + start_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + end_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + state = 'RUNTIME_STATE_UNSPECIFIED', + status_metadata = kfp_server_api.models.pipeline_task_detail_status_metadata.PipelineTaskDetailStatusMetadata( + message = '0', + custom_properties = { + 'key' : None + }, ), + state_history = [ + kfp_server_api.models.pipeline_task_detail_task_status.PipelineTaskDetailTaskStatus( + update_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + error = kfp_server_api.models.googlerpc_status.googlerpcStatus( + code = 56, + message = '0', + details = [ + { + 'key' : None + } + ], ), ) + ], + type = 'ROOT', + type_attributes = kfp_server_api.models.pipeline_task_detail_type_attributes.PipelineTaskDetailTypeAttributes( + iteration_index = '0', + iteration_count = '0', ), + error = kfp_server_api.models.googlerpc_status.googlerpcStatus( + code = 56, + message = '0', ), + parent_task_id = '0', + child_tasks = [ + kfp_server_api.models.pipeline_task_detail_child_task.PipelineTaskDetailChildTask( + task_id = '0', + name = '0', ) + ], + inputs = kfp_server_api.models.pipeline_task_detail_input_outputs.PipelineTaskDetailInputOutputs( + parameters = [ + kfp_server_api.models.input_outputs_io_parameter.InputOutputsIOParameter( + value = kfp_server_api.models.value.value(), + parameter_key = '0', + producer = kfp_server_api.models.v2beta1_io_producer.v2beta1IOProducer( + task_name = '0', + iteration = '0', ), ) + ], + artifacts = [ + kfp_server_api.models.align_structure_with_executor_input.Align structure with Executor Input( + artifact_key = '0', ) + ], ), + outputs = kfp_server_api.models.pipeline_task_detail_input_outputs.PipelineTaskDetailInputOutputs( + parameters = [ 
+ kfp_server_api.models.input_outputs_io_parameter.InputOutputsIOParameter( + value = kfp_server_api.models.value.value(), + parameter_key = '0', ) + ], ), + scope_path = [ + '0' + ], ) ] ) else : diff --git a/backend/api/v2beta1/python_http_client/test/test_v2beta1_run_details.py b/backend/api/v2beta1/python_http_client/test/test_v2beta1_run_details.py index 25c68518d8c..8a8d40ff082 100644 --- a/backend/api/v2beta1/python_http_client/test/test_v2beta1_run_details.py +++ b/backend/api/v2beta1/python_http_client/test/test_v2beta1_run_details.py @@ -40,50 +40,72 @@ def make_instance(self, include_optional): pipeline_run_context_id = '0', task_details = [ kfp_server_api.models.v2beta1_pipeline_task_detail.v2beta1PipelineTaskDetail( - run_id = '0', - task_id = '0', + name = '0', display_name = '0', + task_id = '0', + run_id = '0', + pods = [ + kfp_server_api.models.pipeline_task_detail_task_pod.PipelineTaskDetailTaskPod( + name = '0', + uid = '0', + type = 'UNSPECIFIED', ) + ], + cache_fingerprint = '0', create_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), start_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), end_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), - executor_detail = kfp_server_api.models.v2beta1_pipeline_task_executor_detail.v2beta1PipelineTaskExecutorDetail( - main_job = '0', - pre_caching_check_job = '0', - failed_main_jobs = [ - '0' - ], - failed_pre_caching_check_jobs = [ - '0' - ], ), state = 'RUNTIME_STATE_UNSPECIFIED', - execution_id = '0', - error = kfp_server_api.models.googlerpc_status.googlerpcStatus( - code = 56, + status_metadata = kfp_server_api.models.pipeline_task_detail_status_metadata.PipelineTaskDetailStatusMetadata( message = '0', - details = [ - { - 'key' : None - } - ], ), - inputs = { - 'key' : kfp_server_api.models.v2beta1_artifact_list.v2beta1ArtifactList( - artifact_ids = [ - '0' - ], ) - }, - outputs = { - 'key' : kfp_server_api.models.v2beta1_artifact_list.v2beta1ArtifactList() - }, - parent_task_id = '0', + custom_properties = { + 'key' : None + }, ), state_history = [ - kfp_server_api.models.v2beta1_runtime_status.v2beta1RuntimeStatus( - update_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), ) + kfp_server_api.models.pipeline_task_detail_task_status.PipelineTaskDetailTaskStatus( + update_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + error = kfp_server_api.models.googlerpc_status.googlerpcStatus( + code = 56, + message = '0', + details = [ + { + 'key' : None + } + ], ), ) ], - pod_name = '0', + type = 'ROOT', + type_attributes = kfp_server_api.models.pipeline_task_detail_type_attributes.PipelineTaskDetailTypeAttributes( + iteration_index = '0', + iteration_count = '0', ), + error = kfp_server_api.models.googlerpc_status.googlerpcStatus( + code = 56, + message = '0', ), + parent_task_id = '0', child_tasks = [ kfp_server_api.models.pipeline_task_detail_child_task.PipelineTaskDetailChildTask( task_id = '0', - pod_name = '0', ) + name = '0', ) + ], + inputs = kfp_server_api.models.pipeline_task_detail_input_outputs.PipelineTaskDetailInputOutputs( + parameters = [ + kfp_server_api.models.input_outputs_io_parameter.InputOutputsIOParameter( + value = kfp_server_api.models.value.value(), + parameter_key = '0', + producer = kfp_server_api.models.v2beta1_io_producer.v2beta1IOProducer( + task_name = '0', + iteration = '0', ), ) + ], + artifacts = [ + 
kfp_server_api.models.align_structure_with_executor_input.Align structure with Executor Input( + artifact_key = '0', ) + ], ), + outputs = kfp_server_api.models.pipeline_task_detail_input_outputs.PipelineTaskDetailInputOutputs( + parameters = [ + kfp_server_api.models.input_outputs_io_parameter.InputOutputsIOParameter( + value = kfp_server_api.models.value.value(), + parameter_key = '0', ) + ], ), + scope_path = [ + '0' ], ) ] ) diff --git a/backend/api/v2beta1/python_http_client/test/test_v2beta1_update_tasks_bulk_request.py b/backend/api/v2beta1/python_http_client/test/test_v2beta1_update_tasks_bulk_request.py new file mode 100644 index 00000000000..e06c2f92088 --- /dev/null +++ b/backend/api/v2beta1/python_http_client/test/test_v2beta1_update_tasks_bulk_request.py @@ -0,0 +1,121 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.v2beta1_update_tasks_bulk_request import V2beta1UpdateTasksBulkRequest # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestV2beta1UpdateTasksBulkRequest(unittest.TestCase): + """V2beta1UpdateTasksBulkRequest unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test V2beta1UpdateTasksBulkRequest + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.v2beta1_update_tasks_bulk_request.V2beta1UpdateTasksBulkRequest() # noqa: E501 + if include_optional : + return V2beta1UpdateTasksBulkRequest( + tasks = { + 'key' : kfp_server_api.models.v2beta1_pipeline_task_detail.v2beta1PipelineTaskDetail( + name = '0', + display_name = '0', + task_id = '0', + run_id = '0', + pods = [ + kfp_server_api.models.pipeline_task_detail_task_pod.PipelineTaskDetailTaskPod( + name = '0', + uid = '0', + type = 'UNSPECIFIED', ) + ], + cache_fingerprint = '0', + create_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + start_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + end_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + state = 'RUNTIME_STATE_UNSPECIFIED', + status_metadata = kfp_server_api.models.pipeline_task_detail_status_metadata.PipelineTaskDetailStatusMetadata( + message = '0', + custom_properties = { + 'key' : None + }, ), + state_history = [ + kfp_server_api.models.pipeline_task_detail_task_status.PipelineTaskDetailTaskStatus( + update_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + error = kfp_server_api.models.googlerpc_status.googlerpcStatus( + code = 56, + message = '0', + details = [ + { + 'key' : None + } + ], ), ) + ], + type = 'ROOT', + type_attributes = kfp_server_api.models.pipeline_task_detail_type_attributes.PipelineTaskDetailTypeAttributes( + iteration_index = '0', + iteration_count = '0', ), + error = kfp_server_api.models.googlerpc_status.googlerpcStatus( + code = 56, + message = '0', ), + parent_task_id = '0', + child_tasks = [ + kfp_server_api.models.pipeline_task_detail_child_task.PipelineTaskDetailChildTask( + task_id = '0', + name = '0', ) + ], + inputs 
= kfp_server_api.models.pipeline_task_detail_input_outputs.PipelineTaskDetailInputOutputs( + parameters = [ + kfp_server_api.models.input_outputs_io_parameter.InputOutputsIOParameter( + value = kfp_server_api.models.value.value(), + parameter_key = '0', + producer = kfp_server_api.models.v2beta1_io_producer.v2beta1IOProducer( + task_name = '0', + iteration = '0', ), ) + ], + artifacts = [ + kfp_server_api.models.align_structure_with_executor_input.Align structure with Executor Input( + artifact_key = '0', ) + ], ), + outputs = kfp_server_api.models.pipeline_task_detail_input_outputs.PipelineTaskDetailInputOutputs( + parameters = [ + kfp_server_api.models.input_outputs_io_parameter.InputOutputsIOParameter( + value = kfp_server_api.models.value.value(), + parameter_key = '0', ) + ], ), + scope_path = [ + '0' + ], ) + } + ) + else : + return V2beta1UpdateTasksBulkRequest( + ) + + def testV2beta1UpdateTasksBulkRequest(self): + """Test V2beta1UpdateTasksBulkRequest""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/v2beta1/python_http_client/test/test_v2beta1_update_tasks_bulk_response.py b/backend/api/v2beta1/python_http_client/test/test_v2beta1_update_tasks_bulk_response.py new file mode 100644 index 00000000000..26223adcfed --- /dev/null +++ b/backend/api/v2beta1/python_http_client/test/test_v2beta1_update_tasks_bulk_response.py @@ -0,0 +1,121 @@ +# coding: utf-8 + +""" + Kubeflow Pipelines API + + This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition. + + Contact: kubeflow-pipelines@google.com + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import kfp_server_api +from kfp_server_api.models.v2beta1_update_tasks_bulk_response import V2beta1UpdateTasksBulkResponse # noqa: E501 +from kfp_server_api.rest import ApiException + +class TestV2beta1UpdateTasksBulkResponse(unittest.TestCase): + """V2beta1UpdateTasksBulkResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test V2beta1UpdateTasksBulkResponse + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = kfp_server_api.models.v2beta1_update_tasks_bulk_response.V2beta1UpdateTasksBulkResponse() # noqa: E501 + if include_optional : + return V2beta1UpdateTasksBulkResponse( + tasks = { + 'key' : kfp_server_api.models.v2beta1_pipeline_task_detail.v2beta1PipelineTaskDetail( + name = '0', + display_name = '0', + task_id = '0', + run_id = '0', + pods = [ + kfp_server_api.models.pipeline_task_detail_task_pod.PipelineTaskDetailTaskPod( + name = '0', + uid = '0', + type = 'UNSPECIFIED', ) + ], + cache_fingerprint = '0', + create_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + start_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + end_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + state = 'RUNTIME_STATE_UNSPECIFIED', + status_metadata = kfp_server_api.models.pipeline_task_detail_status_metadata.PipelineTaskDetailStatusMetadata( + message = '0', + custom_properties = { + 'key' : None + }, ), + state_history = [ + 
kfp_server_api.models.pipeline_task_detail_task_status.PipelineTaskDetailTaskStatus( + update_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + error = kfp_server_api.models.googlerpc_status.googlerpcStatus( + code = 56, + message = '0', + details = [ + { + 'key' : None + } + ], ), ) + ], + type = 'ROOT', + type_attributes = kfp_server_api.models.pipeline_task_detail_type_attributes.PipelineTaskDetailTypeAttributes( + iteration_index = '0', + iteration_count = '0', ), + error = kfp_server_api.models.googlerpc_status.googlerpcStatus( + code = 56, + message = '0', ), + parent_task_id = '0', + child_tasks = [ + kfp_server_api.models.pipeline_task_detail_child_task.PipelineTaskDetailChildTask( + task_id = '0', + name = '0', ) + ], + inputs = kfp_server_api.models.pipeline_task_detail_input_outputs.PipelineTaskDetailInputOutputs( + parameters = [ + kfp_server_api.models.input_outputs_io_parameter.InputOutputsIOParameter( + value = kfp_server_api.models.value.value(), + parameter_key = '0', + producer = kfp_server_api.models.v2beta1_io_producer.v2beta1IOProducer( + task_name = '0', + iteration = '0', ), ) + ], + artifacts = [ + kfp_server_api.models.align_structure_with_executor_input.Align structure with Executor Input( + artifact_key = '0', ) + ], ), + outputs = kfp_server_api.models.pipeline_task_detail_input_outputs.PipelineTaskDetailInputOutputs( + parameters = [ + kfp_server_api.models.input_outputs_io_parameter.InputOutputsIOParameter( + value = kfp_server_api.models.value.value(), + parameter_key = '0', ) + ], ), + scope_path = [ + '0' + ], ) + } + ) + else : + return V2beta1UpdateTasksBulkResponse( + ) + + def testV2beta1UpdateTasksBulkResponse(self): + """Test V2beta1UpdateTasksBulkResponse""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/backend/api/v2beta1/run.proto b/backend/api/v2beta1/run.proto index 4825286e45e..1bf6eb054e8 100644 --- a/backend/api/v2beta1/run.proto +++ b/backend/api/v2beta1/run.proto @@ -24,6 +24,7 @@ import "google/protobuf/struct.proto"; import "google/rpc/status.proto"; import "protoc-gen-openapiv2/options/annotations.proto"; import "backend/api/v2beta1/runtime_config.proto"; +import "backend/api/v2beta1/artifact.proto"; option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_swagger) = { schemes: [1, 2], // http + https @@ -127,6 +128,69 @@ service RunService { }; } + rpc CreateTask(CreateTaskRequest) returns (PipelineTaskDetail) { + option (google.api.http) = { + post: "/apis/v2beta1/tasks" + body: "task" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + operation_id: "create_task" + summary: "Creates a new task." + tags: "RunService" + }; + } + + // Update the task with the provided task details. + // Update Task uses merge semantics for Parameters and does not + // over-write them. This is to accommodate driver/launcher usage + // for asynchronous writes to the same task (e.g. during + // back propagation). Merging parameters avoids encountering + // race conditions. + rpc UpdateTask(UpdateTaskRequest) returns (PipelineTaskDetail) { + option (google.api.http) = { + patch: "/apis/v2beta1/tasks/{task_id}" + body: "task" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + operation_id: "update_task" + summary: "Updates an existing task." 
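+ // A minimal illustration of the merge semantics described above + // (assumed behavior, not normative): if one writer patches + // outputs.parameters with [p1] while another concurrently patches + // [p2], the stored task keeps [p1, p2] instead of only the last write.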
+ tags: "RunService" + }; + } + + rpc UpdateTasksBulk(UpdateTasksBulkRequest) returns (UpdateTasksBulkResponse) { + option (google.api.http) = { + post: "/apis/v2beta1/tasks:batchUpdate" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + operation_id: "batch_update_tasks" + summary: "Updates multiple tasks in bulk." + tags: "RunService" + }; + } + + rpc GetTask(GetTaskRequest) returns (PipelineTaskDetail) { + option (google.api.http) = { + get: "/apis/v2beta1/tasks/{task_id}" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + operation_id: "get_task" + summary: "Gets a specific task by ID." + tags: "RunService" + }; + } + + rpc ListTasks(ListTasksRequest) returns (ListTasksResponse) { + option (google.api.http) = { + get: "/apis/v2beta1/tasks" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + operation_id: "list_tasks" + summary: "Lists tasks with optional filtering." + tags: "RunService" + }; + } } message Run { @@ -198,7 +262,8 @@ message Run { google.rpc.Status error = 14; // Output. Runtime details of a run. - RunDetails run_details = 15; + // Either remove or deprecate this + RunDetails run_details = 15 [deprecated = true]; // ID of the recurring run that triggered this run. string recurring_run_id = 16; @@ -206,6 +271,13 @@ message Run { // Output. A sequence of run statuses. This field keeps a record // of state transitions. repeated RuntimeStatus state_history = 17; + + // Output only. Reference to the pipeline used for this run. + PipelineVersionReference pipeline_reference = 19; + + int32 task_count = 20; + + repeated PipelineTaskDetail tasks = 21; } // Reference to an existing pipeline version. @@ -278,71 +350,172 @@ message RunDetails { // Runtime information of a task execution. message PipelineTaskDetail { - // ID of the parent run. - string run_id = 1; + string name = 1; + // User specified name of a task that is defined in + // [Pipeline.spec][]. + string display_name = 2; // System-generated ID of a task. - string task_id = 2; + string task_id = 3; - // User specified name of a task that is defined in - // [Pipeline.spec][]. - string display_name = 3; + // ID of the parent run. + string run_id = 4; + + enum TaskPodType { + UNSPECIFIED = 0; + DRIVER = 1; + EXECUTOR = 2; + } + message TaskPod { + string name = 1; + string uid = 2; + TaskPodType type = 3; + } + repeated TaskPod pods = 5; + + string cache_fingerprint = 6; // Creation time of a task. - google.protobuf.Timestamp create_time = 4; + google.protobuf.Timestamp create_time = 7; // Starting time of a task. - google.protobuf.Timestamp start_time = 5; + google.protobuf.Timestamp start_time = 8; // Completion time of a task. - google.protobuf.Timestamp end_time = 6; + google.protobuf.Timestamp end_time = 9; - // Execution information of a task. - PipelineTaskExecutorDetail executor_detail = 7; + // Runtime state of a Task + enum TaskState { + // Default value. This value is not used. + RUNTIME_STATE_UNSPECIFIED = 0; - // Runtime state of a task. - RuntimeState state = 8; + // Entity execution is in progress. + RUNNING = 1; - // Execution id of the corresponding entry in ML metadata store. - int64 execution_id = 9; + // Entity completed successfully. + SUCCEEDED = 2; - // The error that occurred during task execution. - // Only populated when the task is in FAILED or CANCELED state. - google.rpc.Status error = 10; + // Entity has been skipped. For example, due to caching. 
+ SKIPPED = 3; - // Input artifacts of the task. + FAILED = 4; - map<string, ArtifactList> inputs = 11; + CACHED = 5; - // Output artifacts of the task. + } map<string, ArtifactList> outputs = 12; + TaskState state = 10; - // ID of the parent task if the task is within a component scope. - // Empty if the task is at the root level. - string parent_task_id = 13; + + message StatusMetadata { + // KFP Backend will populate this field with error messages + // if any are available on a Failed task. + string message = 1; + // Custom status metadata; this can be used to provide + // additional status info for a given task during runtime. + // This is currently not utilized by the KFP backend. + map<string, google.protobuf.Value> custom_properties = 2; + } + StatusMetadata status_metadata = 11; + + // Timestamped representation of a Task state with an optional error. + message TaskStatus { + google.protobuf.Timestamp update_time = 1; + TaskState state = 2; + google.rpc.Status error = 3; + } // A sequence of task statuses. This field keeps a record // of state transitions. - repeated RuntimeStatus state_history = 14; + repeated TaskStatus state_history = 12; + + enum TaskType { + // Root task is the top ancestor task of all tasks in the pipeline run. + // It is the only task with no parent task in a Pipeline Run. + ROOT = 0; + // All child tasks in the Run DAG are Runtime tasks, with the exception + // of K8S driver pods. + // These tasks are the only tasks that have Executor Pods. + RUNTIME = 1; + // Condition Branch is the wrapper task of an If block. + CONDITION_BRANCH = 2; + // Condition is an individual "if" branch, and is + // a child of a CONDITION_BRANCH task. + CONDITION = 3; + // Task Group for CONDITION_BRANCH + // Task Group for RUNTIME Loop Iterations + LOOP = 4; + EXIT_HANDLER = 5; + IMPORTER = 6; + // Generic DAG task type for types like Nested Pipelines, + // where there is no declarative way to detect this within + // a driver. + DAG = 7; + } + TaskType type = 13; + + message TypeAttributes { + // Optional. Applies to a task of type RUNTIME that is a loop iteration. + optional int64 iteration_index = 1; + // Optional. Applies to type LOOP. + optional int64 iteration_count = 2; + } - // Name of the corresponding pod assigned by the orchestration engine. - // Also known as node_id. - string pod_name = 15; + TypeAttributes type_attributes = 14; + + // The error that occurred during task execution. + // Only populated when the task is in FAILED or CANCELED state. + google.rpc.Status error = 15; + + // ID of the parent task if the task is within a component scope. + // Empty if the task is at the root level. + optional string parent_task_id = 16; // A dependent task that requires this one to succeed. // Represented by either task_id or pod_name. message ChildTask { - oneof child_task { - // System-generated ID of a task. - string task_id = 1; + // System-generated ID of a task. + string task_id = 1; + string name = 2; + } - // Name of the corresponding pod assigned by the orchestration engine. - // Also known as node_id. - string pod_name = 2; + // Sequence of dependent tasks. + repeated ChildTask child_tasks = 17; + + message InputOutputs { + message IOParameter { + google.protobuf.Value value = 1; + IOType type = 2; + string parameter_key = 3; + // This field is optional because in the case of + // Input RuntimeValues, ComponentDefaultInputs, + // and Raw Iterator Input there are no producers. + optional IOProducer producer = 4; } - } - // Sequence of dependen tasks.
- repeated ChildTask child_tasks = 16; + // Align structure with Executor Input + message IOArtifact { + repeated Artifact artifacts = 1; + IOType type = 2; + string artifact_key = 3; + IOProducer producer = 4; + } + // For Loops, parameters are filled with the resolved + // parameterIterator.items + repeated IOParameter parameters = 1; + + // Output only. To create Artifacts for a task, use + // ArtifactTasks to link artifacts to tasks. + repeated IOArtifact artifacts = 2; + } + InputOutputs inputs = 18; + InputOutputs outputs = 19; + + // The scope of this task within the + // pipeline spec. Each entry represents + // either a Dag Task or a Container task. + // Note that Container tasks are + // always the last entry in a scope_path. + repeated string scope_path = 20; } // Runtime information of a pipeline task executor. @@ -388,6 +561,20 @@ message GetRunRequest { // The ID of the run to be retrieved. string run_id = 2; + + enum ViewMode { + // By default the `tasks` field is omitted. + // This provides a faster and leaner run object. + DEFAULT = 0; + + // This view mode displays all the tasks for this run + // with all of their fields populated. + FULL = 1; + } + + // Optional view mode. This field can be used to adjust + // how detailed the returned Run object will be. + optional ViewMode view = 3; } message ListRunsRequest { @@ -414,6 +601,20 @@ // A url-encoded, JSON-serialized Filter protocol buffer (see // [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/filter.proto)). string filter = 6; + + enum ViewMode { + // By default the `tasks` field is omitted. + // This provides a faster and leaner run object. + DEFAULT = 0; + + // This view mode displays all the tasks for this run + // with all of their fields populated. + FULL = 1; + } + + // Optional view mode. This field can be used to adjust + // how detailed the returned Run object will be. + optional ViewMode view = 7; } message TerminateRunRequest { @@ -484,4 +685,53 @@ message RetryRunRequest { // The ID of the run to be retried. string run_id = 2; +} + +message CreateTaskRequest { + PipelineTaskDetail task = 1; +} + +message UpdateTaskRequest { + string task_id = 1; + PipelineTaskDetail task = 2; +} + +message UpdateTasksBulkRequest { + // Required. Map of task ID to task detail for bulk update. + // Key: task_id, Value: PipelineTaskDetail to update + map<string, PipelineTaskDetail> tasks = 1; +} + +message UpdateTasksBulkResponse { + // Map of task ID to updated task detail. + // Key: task_id, Value: Updated PipelineTaskDetail + map<string, PipelineTaskDetail> tasks = 1; +} + +message GetTaskRequest { + string task_id = 1; +} + +message ListTasksRequest { + // Required. Must specify either parent_id, run_id, or namespace to filter tasks. + oneof parent_filter { + // List all tasks under this parent task. + string parent_id = 1; + // List all tasks for this run. + string run_id = 2; + // List all tasks in this namespace. + // The primary use case for this filter is to detect cache hits.
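+ // A minimal illustration (assumed usage, not normative): a caching + // check could list SUCCEEDED tasks in a namespace, match their + // cache_fingerprint against the task about to run, and reuse the + // matching task's outputs instead of re-executing.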
+ string namespace = 3; + } + + int32 page_size = 4; + string page_token = 5; + string filter = 6; + string order_by = 7; +} + +message ListTasksResponse { + repeated PipelineTaskDetail tasks = 1; + string next_page_token = 2; + int32 total_size = 3; } \ No newline at end of file diff --git a/backend/api/v2beta1/swagger/artifact.swagger.json b/backend/api/v2beta1/swagger/artifact.swagger.json new file mode 100644 index 00000000000..539f30d272a --- /dev/null +++ b/backend/api/v2beta1/swagger/artifact.swagger.json @@ -0,0 +1,655 @@ +{ + "swagger": "2.0", + "info": { + "title": "backend/api/v2beta1/artifact.proto", + "version": "version not set" + }, + "tags": [ + { + "name": "ArtifactService" + } + ], + "schemes": [ + "http", + "https" + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "paths": { + "/apis/v2beta1/artifact_tasks": { + "get": { + "summary": "Lists artifact-task relationships.", + "operationId": "list_artifact_tasks", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v2beta1ListArtifactTasksResponse" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/googlerpcStatus" + } + } + }, + "parameters": [ + { + "name": "task_ids", + "description": "Optional, filter artifact task by a set of task_ids", + "in": "query", + "required": false, + "type": "array", + "items": { + "type": "string" + }, + "collectionFormat": "multi" + }, + { + "name": "run_ids", + "description": "Optional, filter artifact task by a set of run_ids", + "in": "query", + "required": false, + "type": "array", + "items": { + "type": "string" + }, + "collectionFormat": "multi" + }, + { + "name": "artifact_ids", + "description": "Optional, filter artifact task by a set of artifact_ids", + "in": "query", + "required": false, + "type": "array", + "items": { + "type": "string" + }, + "collectionFormat": "multi" + }, + { + "name": "type", + "description": "Optional. 
Only list artifact tasks that have artifacts of this type.\n\n - UNSPECIFIED: For validation\n - COMPONENT_DEFAULT_INPUT: This is used for inputs that are\nprovided via default parameters in\nthe component input definitions\n - TASK_OUTPUT_INPUT: This is used for inputs that are\nprovided via upstream tasks.\nIn the sdk this appears as:\nTaskInputsSpec.kind.task_output_parameter\n\u0026 TaskInputsSpec.kind.task_output_artifact\n - COMPONENT_INPUT: Used for inputs that are\npassed from parent tasks.\n - RUNTIME_VALUE_INPUT: Hardcoded values passed\nas arguments to the task.\n - COLLECTED_INPUTS: Used for dsl.Collected\nUsage of this type indicates that all\nArtifacts within the IOArtifact.artifacts\nare inputs collected from sub tasks with\nITERATOR_OUTPUT outputs.\n - ITERATOR_INPUT: In a for loop task, introduced via ParallelFor, this type\nis used to indicate whether this resolved input belongs\nto a parameterIterator or artifactIterator.\nIn such a case the \"artifacts\" field for IOArtifact.artifacts\nis the list of resolved items for this parallelFor.\n - ITERATOR_INPUT_RAW: Hardcoded iterator parameters.\nRaw Iterator inputs have no producer\n - ITERATOR_OUTPUT: When an output is produced by a Runtime Iteration Task\nThis value is use to differentiate between standard inputs\n - OUTPUT: All other output types fall under this type.\n - ONE_OF_OUTPUT: An output of a Conditions branch.", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "UNSPECIFIED", + "COMPONENT_DEFAULT_INPUT", + "TASK_OUTPUT_INPUT", + "COMPONENT_INPUT", + "RUNTIME_VALUE_INPUT", + "COLLECTED_INPUTS", + "ITERATOR_INPUT", + "ITERATOR_INPUT_RAW", + "ITERATOR_OUTPUT", + "OUTPUT", + "ONE_OF_OUTPUT", + "TASK_FINAL_STATUS_OUTPUT" + ], + "default": "UNSPECIFIED" + }, + { + "name": "page_token", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "page_size", + "in": "query", + "required": false, + "type": "integer", + "format": "int32" + }, + { + "name": "sort_by", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "filter", + "in": "query", + "required": false, + "type": "string" + } + ], + "tags": [ + "ArtifactService" + ] + }, + "post": { + "summary": "Creates an artifact-task relationship.", + "operationId": "create_artifact_task", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v2beta1ArtifactTask" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/googlerpcStatus" + } + } + }, + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/v2beta1CreateArtifactTaskRequest" + } + } + ], + "tags": [ + "ArtifactService" + ] + } + }, + "/apis/v2beta1/artifact_tasks:batchCreate": { + "post": { + "summary": "Creates multiple artifact-task relationships in bulk.", + "operationId": "batch_create_artifact_tasks", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v2beta1CreateArtifactTasksBulkResponse" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/googlerpcStatus" + } + } + }, + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/v2beta1CreateArtifactTasksBulkRequest" + } + } + ], + "tags": [ + "ArtifactService" + ] + } + }, + "/apis/v2beta1/artifacts": { + "get": { + "summary": "Finds all artifacts 
within the specified namespace.", + "operationId": "list_artifacts", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v2beta1ListArtifactResponse" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/googlerpcStatus" + } + } + }, + "parameters": [ + { + "name": "namespace", + "description": "Optional input. Namespace for the artifacts.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "page_token", + "description": "A page token to request the results page.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "page_size", + "description": "The number of artifacts to be listed per page. If there are more artifacts\nthan this number, the response message will contain a valid value in the\nnextPageToken field.", + "in": "query", + "required": false, + "type": "integer", + "format": "int32" + }, + { + "name": "sort_by", + "description": "Sorting order in form of \"field_name\", \"field_name asc\" or \"field_name desc\".\nAscending by default.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "filter", + "description": "A url-encoded, JSON-serialized filter protocol buffer (see\n[filter.proto](https://github.com/kubeflow/artifacts/blob/master/backend/api/filter.proto)).", + "in": "query", + "required": false, + "type": "string" + } + ], + "tags": [ + "ArtifactService" + ] + }, + "post": { + "summary": "Creates a new artifact.", + "operationId": "create_artifact", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v2beta1Artifact" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/googlerpcStatus" + } + } + }, + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/v2beta1CreateArtifactRequest" + } + } + ], + "tags": [ + "ArtifactService" + ] + } + }, + "/apis/v2beta1/artifacts/{artifact_id}": { + "get": { + "summary": "Finds a specific Artifact by ID.", + "operationId": "get_artifact", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v2beta1Artifact" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/googlerpcStatus" + } + } + }, + "parameters": [ + { + "name": "artifact_id", + "description": "Required. 
The ID of the artifact to be retrieved.", + "in": "path", + "required": true, + "type": "string" + } + ], + "tags": [ + "ArtifactService" + ] + } + }, + "/apis/v2beta1/artifacts:batchCreate": { + "post": { + "summary": "Creates multiple artifacts in bulk.", + "operationId": "batch_create_artifacts", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v2beta1CreateArtifactsBulkResponse" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/googlerpcStatus" + } + } + }, + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/v2beta1CreateArtifactsBulkRequest" + } + } + ], + "tags": [ + "ArtifactService" + ] + } + } + }, + "definitions": { + "ArtifactArtifactType": { + "type": "string", + "enum": [ + "TYPE_UNSPECIFIED", + "Artifact", + "Model", + "Dataset", + "HTML", + "Markdown", + "Metric", + "ClassificationMetric", + "SlicedClassificationMetric" + ], + "default": "TYPE_UNSPECIFIED", + "description": " - TYPE_UNSPECIFIED: default; treated as \"not set\"\nreject if unset." + }, + "googlerpcStatus": { + "type": "object", + "properties": { + "code": { + "type": "integer", + "format": "int32", + "description": "The status code, which should be an enum value of [google.rpc.Code][google.rpc.Code]." + }, + "message": { + "type": "string", + "description": "A developer-facing error message, which should be in English. Any\nuser-facing error message should be localized and sent in the\n[google.rpc.Status.details][google.rpc.Status.details] field, or localized by the client." + }, + "details": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/protobufAny" + }, + "description": "A list of messages that carry the error details. There is a common set of\nmessage types for APIs to use." + } + }, + "description": "The `Status` type defines a logical error model that is suitable for\ndifferent programming environments, including REST APIs and RPC APIs. It is\nused by [gRPC](https://github.com/grpc). Each `Status` message contains\nthree pieces of data: error code, error message, and error details.\n\nYou can find out more about this error model and how to work with it in the\n[API Design Guide](https://cloud.google.com/apis/design/errors)." + }, + "protobufAny": { + "type": "object", + "properties": { + "@type": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. 
(Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com. As of May 2023, there are no widely used type server\nimplementations and no plans to implement one.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + } + }, + "additionalProperties": {}, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n // or ...\n if (any.isSameTypeAs(Foo.getDefaultInstance())) {\n foo = any.unpack(Foo.getDefaultInstance());\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := \u0026pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + }, + "protobufNullValue": { + "type": "string", + "enum": [ + "NULL_VALUE" + ], + "default": "NULL_VALUE", + "description": "`NullValue` is a singleton enumeration to represent the null value for the\n`Value` type union.\n\nThe JSON representation for `NullValue` is JSON `null`.\n\n - NULL_VALUE: Null value." + }, + "v2beta1Artifact": { + "type": "object", + "properties": { + "artifact_id": { + "type": "string", + "title": "Output only. The unique server generated id of the artifact.\nNote: Updated id name to be consistent with other api naming patterns (with prefix)", + "readOnly": true + }, + "name": { + "type": "string", + "description": "Required. 
The client provided name of the artifact.\nNote: in MLMD when name was set, it had to be unique for that type_id;\nthis restriction is removed here.\nIf this is a \"Metric\" artifact, the name of the metric\nis treated as the Key in its K/V pair." + }, + "description": { + "type": "string" + }, + "type": { + "$ref": "#/definitions/ArtifactArtifactType", + "title": "Required. The name of an ArtifactType. E.g. Dataset" + }, + "uri": { + "type": "string", + "description": "The uniform resource identifier of the physical artifact.\nMay be empty if there is no physical artifact." + }, + "metadata": { + "type": "object", + "additionalProperties": {}, + "description": "Optional. User provided custom properties which are not defined by its type." + }, + "number_value": { + "type": "number", + "format": "double", + "title": "Used primarily for metrics" + }, + "created_at": { + "type": "string", + "format": "date-time", + "description": "Output only. Create time of the artifact in millisecond since epoch.\nNote: The type and name are updated from mlmd artifact to be consistent with other backend apis.", + "readOnly": true + }, + "namespace": { + "type": "string" + } + }, + "title": "Not to be confused with RuntimeArtifact in PipelineSpec" + }, + "v2beta1ArtifactTask": { + "type": "object", + "properties": { + "id": { + "type": "string", + "description": "Output only. The unique server generated id of the ArtifactTask.", + "readOnly": true + }, + "artifact_id": { + "type": "string" + }, + "run_id": { + "type": "string" + }, + "task_id": { + "type": "string" + }, + "type": { + "$ref": "#/definitions/v2beta1IOType" + }, + "producer": { + "$ref": "#/definitions/v2beta1IOProducer" + }, + "key": { + "type": "string" + } + }, + "title": "Describes a relationship link between Artifacts and Tasks" + }, + "v2beta1CreateArtifactRequest": { + "type": "object", + "properties": { + "artifact": { + "$ref": "#/definitions/v2beta1Artifact", + "description": "Required. The artifact to create." + }, + "run_id": { + "type": "string", + "description": "An artifact is always created in the context of a\nrun." + }, + "task_id": { + "type": "string", + "description": "The Task that is associated with the creation of this artifact." + }, + "producer_key": { + "type": "string", + "title": "The outgoing parameter name of this Artifact within this task's component spec.\nFor example:\ndef preprocess(my_output: dsl.Outputs[dsl.Artifact]):\n ...\nhere the producer_key == \"my_output\"\nNote that producer_task_name == task_name" + }, + "iteration_index": { + "type": "string", + "format": "int64", + "title": "If the producing task is in a parallelFor iteration\nthis field designates the iteration index" + }, + "type": { + "$ref": "#/definitions/v2beta1IOType" + } + } + }, + "v2beta1CreateArtifactTaskRequest": { + "type": "object", + "properties": { + "artifact_task": { + "$ref": "#/definitions/v2beta1ArtifactTask", + "description": "Required. The artifact-task relationship to create." + } + }, + "title": "Request to create an artifact-task relationship" + }, + "v2beta1CreateArtifactTasksBulkRequest": { + "type": "object", + "properties": { + "artifact_tasks": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/v2beta1ArtifactTask" + }, + "description": "Required. The list of artifact-task relationships to create." 
+ } + } + }, + "v2beta1CreateArtifactTasksBulkResponse": { + "type": "object", + "properties": { + "artifact_tasks": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/v2beta1ArtifactTask" + }, + "description": "The list of created artifact-task relationships." + } + } + }, + "v2beta1CreateArtifactsBulkRequest": { + "type": "object", + "properties": { + "artifacts": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/v2beta1CreateArtifactRequest" + }, + "description": "Required. The list of artifacts to create." + } + } + }, + "v2beta1CreateArtifactsBulkResponse": { + "type": "object", + "properties": { + "artifacts": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/v2beta1Artifact" + }, + "description": "The list of created artifacts." + } + } + }, + "v2beta1IOProducer": { + "type": "object", + "properties": { + "task_name": { + "type": "string" + }, + "iteration": { + "type": "string", + "format": "int64", + "title": "When a source is from an iteration Runtime\ntask type inside a ParallelFor" + } + } + }, + "v2beta1IOType": { + "type": "string", + "enum": [ + "UNSPECIFIED", + "COMPONENT_DEFAULT_INPUT", + "TASK_OUTPUT_INPUT", + "COMPONENT_INPUT", + "RUNTIME_VALUE_INPUT", + "COLLECTED_INPUTS", + "ITERATOR_INPUT", + "ITERATOR_INPUT_RAW", + "ITERATOR_OUTPUT", + "OUTPUT", + "ONE_OF_OUTPUT", + "TASK_FINAL_STATUS_OUTPUT" + ], + "default": "UNSPECIFIED", + "description": "Describes the I/O relationship between\nArtifacts/Parameters and Tasks.\nThere are a couple of instances where\ninput/outputs have special types such\nas in the case of LoopArguments or\ndsl.Collected outputs.\n\n - UNSPECIFIED: For validation\n - COMPONENT_DEFAULT_INPUT: This is used for inputs that are\nprovided via default parameters in\nthe component input definitions\n - TASK_OUTPUT_INPUT: This is used for inputs that are\nprovided via upstream tasks.\nIn the sdk this appears as:\nTaskInputsSpec.kind.task_output_parameter\n\u0026 TaskInputsSpec.kind.task_output_artifact\n - COMPONENT_INPUT: Used for inputs that are\npassed from parent tasks.\n - RUNTIME_VALUE_INPUT: Hardcoded values passed\nas arguments to the task.\n - COLLECTED_INPUTS: Used for dsl.Collected\nUsage of this type indicates that all\nArtifacts within the IOArtifact.artifacts\nare inputs collected from sub tasks with\nITERATOR_OUTPUT outputs.\n - ITERATOR_INPUT: In a for loop task, introduced via ParallelFor, this type\nis used to indicate whether this resolved input belongs\nto a parameterIterator or artifactIterator.\nIn such a case the \"artifacts\" field for IOArtifact.artifacts\nis the list of resolved items for this parallelFor.\n - ITERATOR_INPUT_RAW: Hardcoded iterator parameters.\nRaw Iterator inputs have no producer\n - ITERATOR_OUTPUT: When an output is produced by a Runtime Iteration Task\nThis value is used to differentiate between standard inputs\n - OUTPUT: All other output types fall under this type.\n - ONE_OF_OUTPUT: An output of a Conditions branch." + }, + "v2beta1ListArtifactResponse": { + "type": "object", + "properties": { + "artifacts": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/v2beta1Artifact" + }, + "description": "The list of artifacts returned." + }, + "total_size": { + "type": "integer", + "format": "int32", + "description": "The total number of artifacts available. This field is not always populated." 
+ }, + "next_page_token": { + "type": "string", + "description": "A token to retrieve the next page of results, or empty if there are no\nmore results in the list." + } + } + }, + "v2beta1ListArtifactTasksResponse": { + "type": "object", + "properties": { + "artifact_tasks": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/v2beta1ArtifactTask" + } + }, + "total_size": { + "type": "integer", + "format": "int32" + }, + "next_page_token": { + "type": "string" + } + } + } + }, + "securityDefinitions": { + "Bearer": { + "type": "apiKey", + "name": "Authorization", + "in": "header" + } + } +} diff --git a/backend/api/v2beta1/swagger/kfp_api_single_file.swagger.json b/backend/api/v2beta1/swagger/kfp_api_single_file.swagger.json index 90f5008c1db..2d9a780715c 100644 --- a/backend/api/v2beta1/swagger/kfp_api_single_file.swagger.json +++ b/backend/api/v2beta1/swagger/kfp_api_single_file.swagger.json @@ -30,6 +30,332 @@ "application/json" ], "paths": { + "/apis/v2beta1/artifact_tasks": { + "get": { + "summary": "Lists artifact-task relationships.", + "operationId": "list_artifact_tasks", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v2beta1ListArtifactTasksResponse" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/googlerpcStatus" + } + } + }, + "parameters": [ + { + "name": "task_ids", + "description": "Optional, filter artifact task by a set of task_ids", + "in": "query", + "required": false, + "type": "array", + "items": { + "type": "string" + }, + "collectionFormat": "multi" + }, + { + "name": "run_ids", + "description": "Optional, filter artifact task by a set of run_ids", + "in": "query", + "required": false, + "type": "array", + "items": { + "type": "string" + }, + "collectionFormat": "multi" + }, + { + "name": "artifact_ids", + "description": "Optional, filter artifact task by a set of artifact_ids", + "in": "query", + "required": false, + "type": "array", + "items": { + "type": "string" + }, + "collectionFormat": "multi" + }, + { + "name": "type", + "description": "Optional. 
Only list artifact tasks that have artifacts of this type.\n\n - UNSPECIFIED: For validation\n - COMPONENT_DEFAULT_INPUT: This is used for inputs that are\nprovided via default parameters in\nthe component input definitions\n - TASK_OUTPUT_INPUT: This is used for inputs that are\nprovided via upstream tasks.\nIn the sdk this appears as:\nTaskInputsSpec.kind.task_output_parameter\n& TaskInputsSpec.kind.task_output_artifact\n - COMPONENT_INPUT: Used for inputs that are\npassed from parent tasks.\n - RUNTIME_VALUE_INPUT: Hardcoded values passed\nas arguments to the task.\n - COLLECTED_INPUTS: Used for dsl.Collected\nUsage of this type indicates that all\nArtifacts within the IOArtifact.artifacts\nare inputs collected from sub tasks with\nITERATOR_OUTPUT outputs.\n - ITERATOR_INPUT: In a for loop task, introduced via ParallelFor, this type\nis used to indicate whether this resolved input belongs\nto a parameterIterator or artifactIterator.\nIn such a case the \"artifacts\" field for IOArtifact.artifacts\nis the list of resolved items for this parallelFor.\n - ITERATOR_INPUT_RAW: Hardcoded iterator parameters.\nRaw Iterator inputs have no producer\n - ITERATOR_OUTPUT: When an output is produced by a Runtime Iteration Task\nThis value is used to differentiate between standard inputs\n - OUTPUT: All other output types fall under this type.\n - ONE_OF_OUTPUT: An output of a Conditions branch.", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "UNSPECIFIED", + "COMPONENT_DEFAULT_INPUT", + "TASK_OUTPUT_INPUT", + "COMPONENT_INPUT", + "RUNTIME_VALUE_INPUT", + "COLLECTED_INPUTS", + "ITERATOR_INPUT", + "ITERATOR_INPUT_RAW", + "ITERATOR_OUTPUT", + "OUTPUT", + "ONE_OF_OUTPUT", + "TASK_FINAL_STATUS_OUTPUT" + ], + "default": "UNSPECIFIED" + }, + { + "name": "page_token", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "page_size", + "in": "query", + "required": false, + "type": "integer", + "format": "int32" + }, + { + "name": "sort_by", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "filter", + "in": "query", + "required": false, + "type": "string" + } + ], + "tags": [ + "ArtifactService" + ] + }, + "post": { + "summary": "Creates an artifact-task relationship.", + "operationId": "create_artifact_task", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v2beta1ArtifactTask" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/googlerpcStatus" + } + } + }, + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/v2beta1CreateArtifactTaskRequest" + } + } + ], + "tags": [ + "ArtifactService" + ] + } + }, + "/apis/v2beta1/artifact_tasks:batchCreate": { + "post": { + "summary": "Creates multiple artifact-task relationships in bulk.", + "operationId": "batch_create_artifact_tasks", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v2beta1CreateArtifactTasksBulkResponse" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/googlerpcStatus" + } + } + }, + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/v2beta1CreateArtifactTasksBulkRequest" + } + } + ], + "tags": [ + "ArtifactService" + ] + } + }, + "/apis/v2beta1/artifacts": { + "get": { + "summary": "Finds all artifacts within 
the specified namespace.", + "operationId": "list_artifacts", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v2beta1ListArtifactResponse" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/googlerpcStatus" + } + } + }, + "parameters": [ + { + "name": "namespace", + "description": "Optional input. Namespace for the artifacts.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "page_token", + "description": "A page token to request the results page.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "page_size", + "description": "The number of artifacts to be listed per page. If there are more artifacts\nthan this number, the response message will contain a valid value in the\nnextPageToken field.", + "in": "query", + "required": false, + "type": "integer", + "format": "int32" + }, + { + "name": "sort_by", + "description": "Sorting order in form of \"field_name\", \"field_name asc\" or \"field_name desc\".\nAscending by default.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "filter", + "description": "A url-encoded, JSON-serialized filter protocol buffer (see\n[filter.proto](https://github.com/kubeflow/artifacts/blob/master/backend/api/filter.proto)).", + "in": "query", + "required": false, + "type": "string" + } + ], + "tags": [ + "ArtifactService" + ] + }, + "post": { + "summary": "Creates a new artifact.", + "operationId": "create_artifact", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v2beta1Artifact" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/googlerpcStatus" + } + } + }, + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/v2beta1CreateArtifactRequest" + } + } + ], + "tags": [ + "ArtifactService" + ] + } + }, + "/apis/v2beta1/artifacts/{artifact_id}": { + "get": { + "summary": "Finds a specific Artifact by ID.", + "operationId": "get_artifact", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v2beta1Artifact" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/googlerpcStatus" + } + } + }, + "parameters": [ + { + "name": "artifact_id", + "description": "Required. 
The ID of the artifact to be retrieved.", + "in": "path", + "required": true, + "type": "string" + } + ], + "tags": [ + "ArtifactService" + ] + } + }, + "/apis/v2beta1/artifacts:batchCreate": { + "post": { + "summary": "Creates multiple artifacts in bulk.", + "operationId": "batch_create_artifacts", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v2beta1CreateArtifactsBulkResponse" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/googlerpcStatus" + } + } + }, + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/v2beta1CreateArtifactsBulkRequest" + } + } + ], + "tags": [ + "ArtifactService" + ] + } + }, "/apis/v2beta1/auth": { "get": { "operationId": "AuthService_Authorize", @@ -1225,11 +1551,23 @@ "in": "query", "required": false, "type": "string" - } - ], - "tags": [ - "RunService" - ] + }, + { + "name": "view", + "description": "Optional view mode. This field can be used to adjust\nhow detailed the Run object that is returned will be.\n\n - DEFAULT: By default `tasks` field is omitted.\nThis provides a faster and leaner run object.\n - FULL: This view mode displays all the tasks for this run\nwith all its fields populated.", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "DEFAULT", + "FULL" + ], + "default": "DEFAULT" + } + ], + "tags": [ + "RunService" + ] }, "post": { "summary": "Creates a new run in an experiment specified by experiment ID.\nIf experiment ID is not specified, the run is created in the default experiment.", @@ -1303,6 +1641,18 @@ "in": "query", "required": false, "type": "string" + }, + { + "name": "view", + "description": "Optional view mode. 
This field can be used to adjust\nhow detailed the Run object that is returned will be.\n\n - DEFAULT: By default `tasks` field is omitted.\nThis provides a faster and leaner run object.\n - FULL: This view mode displays all the tasks for this run\nwith all its fields populated.", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "DEFAULT", + "FULL" + ], + "default": "DEFAULT" } ], "tags": [ @@ -1561,6 +1911,209 @@ ] } }, + "/apis/v2beta1/tasks": { + "get": { + "summary": "Lists tasks with optional filtering.", + "operationId": "list_tasks", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v2beta1ListTasksResponse" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/googlerpcStatus" + } + } + }, + "parameters": [ + { + "name": "parent_id", + "description": "List all tasks with this parent task.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "run_id", + "description": "List all tasks for this run.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "namespace", + "description": "List all tasks in this namespace.\nThe primary use case for this filter is to detect cache hits.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "page_size", + "in": "query", + "required": false, + "type": "integer", + "format": "int32" + }, + { + "name": "page_token", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "filter", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "order_by", + "in": "query", + "required": false, + "type": "string" + } + ], + "tags": [ + "RunService" + ] + }, + "post": { + "summary": "Creates a new task.", + "operationId": "create_task", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v2beta1PipelineTaskDetail" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/googlerpcStatus" + } + } + }, + "parameters": [ + { + "name": "task", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/v2beta1PipelineTaskDetail" + } + } + ], + "tags": [ + "RunService" + ] + } + }, + "/apis/v2beta1/tasks/{task_id}": { + "get": { + "summary": "Gets a specific task by ID.", + "operationId": "get_task", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v2beta1PipelineTaskDetail" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/googlerpcStatus" + } + } + }, + "parameters": [ + { + "name": "task_id", + "in": "path", + "required": true, + "type": "string" + } + ], + "tags": [ + "RunService" + ] + }, + "patch": { + "summary": "Updates an existing task.", + "operationId": "update_task", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v2beta1PipelineTaskDetail" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/googlerpcStatus" + } + } + }, + "parameters": [ + { + "name": "task_id", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "task", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/v2beta1PipelineTaskDetail" + } + } + ], + "tags": [ + "RunService" + ] + } + }, + "/apis/v2beta1/tasks:batchUpdate": { 
+ "post": { + "summary": "Updates multiple tasks in bulk.", + "operationId": "batch_update_tasks", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v2beta1UpdateTasksBulkResponse" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/googlerpcStatus" + } + } + }, + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/v2beta1UpdateTasksBulkRequest" + } + } + ], + "tags": [ + "RunService" + ] + } + }, "/apis/v2beta1/visualizations/{namespace}": { "post": { "operationId": "VisualizationService_CreateVisualizationV1", @@ -1601,6 +2154,311 @@ } }, "definitions": { + "ArtifactArtifactType": { + "type": "string", + "enum": [ + "TYPE_UNSPECIFIED", + "Artifact", + "Model", + "Dataset", + "HTML", + "Markdown", + "Metric", + "ClassificationMetric", + "SlicedClassificationMetric" + ], + "default": "TYPE_UNSPECIFIED", + "description": " - TYPE_UNSPECIFIED: default; treated as \"not set\"\nreject if unset." + }, + "googlerpcStatus": { + "type": "object", + "properties": { + "code": { + "type": "integer", + "format": "int32", + "description": "The status code, which should be an enum value of [google.rpc.Code][google.rpc.Code]." + }, + "message": { + "type": "string", + "description": "A developer-facing error message, which should be in English. Any\nuser-facing error message should be localized and sent in the\n[google.rpc.Status.details][google.rpc.Status.details] field, or localized by the client." + }, + "details": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/protobufAny" + }, + "description": "A list of messages that carry the error details. There is a common set of\nmessage types for APIs to use." + } + }, + "description": "The `Status` type defines a logical error model that is suitable for\ndifferent programming environments, including REST APIs and RPC APIs. It is\nused by [gRPC](https://github.com/grpc). Each `Status` message contains\nthree pieces of data: error code, error message, and error details.\n\nYou can find out more about this error model and how to work with it in the\n[API Design Guide](https://cloud.google.com/apis/design/errors)." + }, + "protobufAny": { + "type": "object", + "properties": { + "@type": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. 
(Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com. As of May 2023, there are no widely used type server\nimplementations and no plans to implement one.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + }, + "type_url": { + "type": "string", + "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + }, + "value": { + "type": "string", + "format": "byte", + "description": "Must be a valid serialized protocol buffer of the above specified type." + } + }, + "additionalProperties": {}, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n // or ...\n if (any.isSameTypeAs(Foo.getDefaultInstance())) {\n foo = any.unpack(Foo.getDefaultInstance());\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. 
Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": <string>,\n \"lastName\": <string>\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + }, + "protobufNullValue": { + "type": "string", + "enum": [ + "NULL_VALUE" + ], + "default": "NULL_VALUE", + "description": "`NullValue` is a singleton enumeration to represent the null value for the\n`Value` type union.\n\nThe JSON representation for `NullValue` is JSON `null`.\n\n - NULL_VALUE: Null value." + }, + "v2beta1Artifact": { + "type": "object", + "properties": { + "artifact_id": { + "type": "string", + "title": "Output only. The unique server generated id of the artifact.\nNote: Updated id name to be consistent with other api naming patterns (with prefix)", + "readOnly": true + }, + "name": { + "type": "string", + "description": "Required. The client provided name of the artifact.\nNote: in MLMD when name was set, it had to be unique for that type_id;\nthis restriction is removed here.\nIf this is a \"Metric\" artifact, the name of the metric\nis treated as the Key in its K/V pair." + }, + "description": { + "type": "string" + }, + "type": { + "$ref": "#/definitions/ArtifactArtifactType", + "title": "Required. The name of an ArtifactType. E.g. Dataset" + }, + "uri": { + "type": "string", + "description": "The uniform resource identifier of the physical artifact.\nMay be empty if there is no physical artifact." + }, + "metadata": { + "type": "object", + "additionalProperties": {}, + "description": "Optional. User provided custom properties which are not defined by its type." + }, + "number_value": { + "type": "number", + "format": "double", + "title": "Used primarily for metrics" + }, + "created_at": { + "type": "string", + "format": "date-time", + "description": "Output only. Create time of the artifact in millisecond since epoch.\nNote: The type and name are updated from mlmd artifact to be consistent with other backend apis.", + "readOnly": true + }, + "namespace": { + "type": "string" + } + }, + "title": "Not to be confused with RuntimeArtifact in PipelineSpec" + }, + "v2beta1ArtifactTask": { + "type": "object", + "properties": { + "id": { + "type": "string", + "description": "Output only. The unique server generated id of the ArtifactTask.", + "readOnly": true + }, + "artifact_id": { + "type": "string" + }, + "run_id": { + "type": "string" + }, + "task_id": { + "type": "string" + }, + "type": { + "$ref": "#/definitions/v2beta1IOType" + }, + "producer": { + "$ref": "#/definitions/v2beta1IOProducer" + }, + "key": { + "type": "string" + } + }, + "title": "Describes a relationship link between Artifacts and Tasks" + }, + "v2beta1CreateArtifactRequest": { + "type": "object", + "properties": { + "artifact": { + "$ref": "#/definitions/v2beta1Artifact", + "description": "Required. The artifact to create." + }, + "run_id": { + "type": "string", + "description": "An artifact is always created in the context of a\nrun." + }, + "task_id": { + "type": "string", + "description": "The Task that is associated with the creation of this artifact." 
+ }, + "producer_key": { + "type": "string", + "title": "The outgoing parameter name of this Artifact within this task's component spec.\nFor example:\ndef preprocess(my_output: dsl.Outputs[dsl.Artifact]):\n ...\nhere the producer_key == \"my_output\"\nNote that producer_task_name == task_name" + }, + "iteration_index": { + "type": "string", + "format": "int64", + "title": "If the producing task is in a parallelFor iteration\nthis field designates the iteration index" + }, + "type": { + "$ref": "#/definitions/v2beta1IOType" + } + } + }, + "v2beta1CreateArtifactTaskRequest": { + "type": "object", + "properties": { + "artifact_task": { + "$ref": "#/definitions/v2beta1ArtifactTask", + "description": "Required. The artifact-task relationship to create." + } + }, + "title": "Request to create an artifact-task relationship" + }, + "v2beta1CreateArtifactTasksBulkRequest": { + "type": "object", + "properties": { + "artifact_tasks": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/v2beta1ArtifactTask" + }, + "description": "Required. The list of artifact-task relationships to create." + } + } + }, + "v2beta1CreateArtifactTasksBulkResponse": { + "type": "object", + "properties": { + "artifact_tasks": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/v2beta1ArtifactTask" + }, + "description": "The list of created artifact-task relationships." + } + } + }, + "v2beta1CreateArtifactsBulkRequest": { + "type": "object", + "properties": { + "artifacts": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/v2beta1CreateArtifactRequest" + }, + "description": "Required. The list of artifacts to create." + } + } + }, + "v2beta1CreateArtifactsBulkResponse": { + "type": "object", + "properties": { + "artifacts": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/v2beta1Artifact" + }, + "description": "The list of created artifacts." 
+ } + } + }, + "v2beta1IOProducer": { + "type": "object", + "properties": { + "task_name": { + "type": "string" + }, + "iteration": { + "type": "string", + "format": "int64", + "title": "When a source is from an iteration Runtime\ntask type inside a ParallelFor" + } + } + }, + "v2beta1IOType": { + "type": "string", + "enum": [ + "UNSPECIFIED", + "COMPONENT_DEFAULT_INPUT", + "TASK_OUTPUT_INPUT", + "COMPONENT_INPUT", + "RUNTIME_VALUE_INPUT", + "COLLECTED_INPUTS", + "ITERATOR_INPUT", + "ITERATOR_INPUT_RAW", + "ITERATOR_OUTPUT", + "OUTPUT", + "ONE_OF_OUTPUT", + "TASK_FINAL_STATUS_OUTPUT" + ], + "default": "UNSPECIFIED", + "description": "Describes the I/O relationship between\nArtifacts/Parameters and Tasks.\nThere are a couple of instances where\ninput/outputs have special types such\nas in the case of LoopArguments or\ndsl.Collected outputs.\n\n - UNSPECIFIED: For validation\n - COMPONENT_DEFAULT_INPUT: This is used for inputs that are\nprovided via default parameters in\nthe component input definitions\n - TASK_OUTPUT_INPUT: This is used for inputs that are\nprovided via upstream tasks.\nIn the sdk this appears as:\nTaskInputsSpec.kind.task_output_parameter\n& TaskInputsSpec.kind.task_output_artifact\n - COMPONENT_INPUT: Used for inputs that are\npassed from parent tasks.\n - RUNTIME_VALUE_INPUT: Hardcoded values passed\nas arguments to the task.\n - COLLECTED_INPUTS: Used for dsl.Collected\nUsage of this type indicates that all\nArtifacts within the IOArtifact.artifacts\nare inputs collected from sub tasks with\nITERATOR_OUTPUT outputs.\n - ITERATOR_INPUT: In a for loop task, introduced via ParallelFor, this type\nis used to indicate whether this resolved input belongs\nto a parameterIterator or artifactIterator.\nIn such a case the \"artifacts\" field for IOArtifact.artifacts\nis the list of resolved items for this parallelFor.\n - ITERATOR_INPUT_RAW: Hardcoded iterator parameters.\nRaw Iterator inputs have no producer\n - ITERATOR_OUTPUT: When an output is produced by a Runtime Iteration Task\nThis value is used to differentiate between standard inputs\n - OUTPUT: All other output types fall under this type.\n - ONE_OF_OUTPUT: An output of a Conditions branch." + }, + "v2beta1ListArtifactResponse": { + "type": "object", + "properties": { + "artifacts": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/v2beta1Artifact" + }, + "description": "The list of artifacts returned." + }, + "total_size": { + "type": "integer", + "format": "int32", + "description": "The total number of artifacts available. This field is not always populated." 
- }, - "message": { - "type": "string", - "description": "A developer-facing error message, which should be in English. Any\nuser-facing error message should be localized and sent in the\n[google.rpc.Status.details][google.rpc.Status.details] field, or localized by the client." - }, - "details": { - "type": "array", - "items": { - "type": "object", - "$ref": "#/definitions/protobufAny" - }, - "description": "A list of messages that carry the error details. There is a common set of\nmessage types for APIs to use." - } - }, - "description": "The `Status` type defines a logical error model that is suitable for\ndifferent programming environments, including REST APIs and RPC APIs. It is\nused by [gRPC](https://github.com/grpc). Each `Status` message contains\nthree pieces of data: error code, error message, and error details.\n\nYou can find out more about this error model and how to work with it in the\n[API Design Guide](https://cloud.google.com/apis/design/errors)." - }, - "protobufAny": { - "type": "object", - "properties": { - "@type": { - "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com. As of May 2023, there are no widely used type server\nimplementations and no plans to implement one.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." - }, - "type_url": { - "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. 
Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." - }, - "value": { - "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." - } - }, - "additionalProperties": {}, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(&foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n // or ...\n if (any.isSameTypeAs(Foo.getDefaultInstance())) {\n foo = any.unpack(Foo.getDefaultInstance());\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := &pb.Foo{...}\n any, err := anypb.New(foo)\n if err != nil {\n ...\n }\n ...\n foo := &pb.Foo{}\n if err := any.UnmarshalTo(foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": <string>,\n \"lastName\": <string>\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" - }, "v2beta1Experiment": { "type": "object", "properties": { @@ -1857,14 +2672,6 @@ } } }, - "protobufNullValue": { - "type": "string", - "enum": [ - "NULL_VALUE" - ], - "default": "NULL_VALUE", - "description": "`NullValue` is a singleton enumeration to represent the null value for the\n`Value` type union.\n\nThe JSON representation for `NullValue` is JSON `null`.\n\n - NULL_VALUE: Null value." - }, "v2beta1CreatePipelineAndVersionRequest": { "type": "object", "properties": { @@ -2214,6 +3021,44 @@ }, "description": "Trigger defines what starts a pipeline run." 
}, + "InputOutputsIOArtifact": { + "type": "object", + "properties": { + "artifacts": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/v2beta1Artifact" + } + }, + "type": { + "$ref": "#/definitions/v2beta1IOType" + }, + "artifact_key": { + "type": "string" + }, + "producer": { + "$ref": "#/definitions/v2beta1IOProducer" + } + }, + "title": "Align structure with Executor Input" + }, + "InputOutputsIOParameter": { + "type": "object", + "properties": { + "value": {}, + "type": { + "$ref": "#/definitions/v2beta1IOType" + }, + "parameter_key": { + "type": "string" + }, + "producer": { + "$ref": "#/definitions/v2beta1IOProducer", + "description": "This field is optional because in the case of\nInput RuntimeValues, ComponentDefaultInputs,\nand Raw Iterator Input there are no producers." + } + } + }, "PipelineTaskDetailChildTask": { "type": "object", "properties": { @@ -2221,26 +3066,147 @@ "type": "string", "description": "System-generated ID of a task." }, - "pod_name": { - "type": "string", - "description": "Name of the corresponding pod assigned by the orchestration engine.\nAlso known as node_id." + "name": { + "type": "string" } }, "description": "A dependent task that requires this one to succeed.\nRepresented by either task_id or pod_name." }, - "v2beta1ArtifactList": { + "PipelineTaskDetailInputOutputs": { "type": "object", "properties": { - "artifact_ids": { + "parameters": { "type": "array", "items": { - "type": "string", - "format": "int64" + "type": "object", + "$ref": "#/definitions/InputOutputsIOParameter" + }, + "title": "For Loops parameters are filled with resolved\nparameterIterator.items" + }, + "artifacts": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/InputOutputsIOArtifact" }, - "description": "A list of artifact metadata ids." + "description": "Output Only. To create Artifacts for a task use\nArtifactTasks to link artifacts to tasks." + } + } + }, + "PipelineTaskDetailStatusMetadata": { + "type": "object", + "properties": { + "message": { + "type": "string", + "description": "KFP Backend will populate this field with error messages\nif any are available on a Failed task." + }, + "custom_properties": { + "type": "object", + "additionalProperties": {}, + "description": "Custom status metadata, this can be used to provide\nadditional status info for a given task during runtime\nThis is currently not utilized by KFP backend." + } + } + }, + "PipelineTaskDetailTaskPod": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "uid": { + "type": "string" + }, + "type": { + "$ref": "#/definitions/PipelineTaskDetailTaskPodType" + } + } + }, + "PipelineTaskDetailTaskPodType": { + "type": "string", + "enum": [ + "UNSPECIFIED", + "DRIVER", + "EXECUTOR" + ], + "default": "UNSPECIFIED" + }, + "PipelineTaskDetailTaskState": { + "type": "string", + "enum": [ + "RUNTIME_STATE_UNSPECIFIED", + "RUNNING", + "SUCCEEDED", + "SKIPPED", + "FAILED", + "CACHED" + ], + "default": "RUNTIME_STATE_UNSPECIFIED", + "description": "- RUNTIME_STATE_UNSPECIFIED: Default value. This value is not used.\n - RUNNING: Entity execution is in progress.\n - SUCCEEDED: Entity completed successfully.\n - SKIPPED: Entity has been skipped. 
For example, due to caching.\n - FAILED: Entity execution has failed.", + "title": "Runtime state of a Task" + }, + "PipelineTaskDetailTaskStatus": { + "type": "object", + "properties": { + "update_time": { + "type": "string", + "format": "date-time" + }, + "state": { + "$ref": "#/definitions/PipelineTaskDetailTaskState" + }, + "error": { + "$ref": "#/definitions/googlerpcStatus" } }, - "description": "A list of artifact metadata." + "description": "Timestamped representation of a Task state with an optional error." + }, + "PipelineTaskDetailTaskType": { + "type": "string", + "enum": [ + "ROOT", + "RUNTIME", + "CONDITION_BRANCH", + "CONDITION", + "LOOP", + "EXIT_HANDLER", + "IMPORTER", + "DAG" + ], + "default": "ROOT", + "description": " - ROOT: Root task is the top ancestor task to all tasks in the pipeline run\nIt is the only task with no parent task in a Pipeline Run.\n - RUNTIME: All child tasks in the Run DAG are Runtime tasks. With the exception\nof K8S driver pods.\nThese tasks are the only tasks that have Executor Pods.\n - CONDITION_BRANCH: Condition Branch is the wrapper task of an If block\n - CONDITION: Condition is an individual \"if\" branch, and is\na child to a CONDITION_BRANCH task.\n - LOOP: Task Group for CONDITION_BRANCH\nTask Group for RUNTIME Loop Iterations\n - DAG: Generic DAG task type for types like Nested Pipelines\nwhere there is no declarative way to detect this within\na driver." + }, + "PipelineTaskDetailTypeAttributes": { + "type": "object", + "properties": { + "iteration_index": { + "type": "string", + "format": "int64", + "title": "Optional. Applies to type Runtime that is an iteration" + }, + "iteration_count": { + "type": "string", + "format": "int64", + "title": "Optional. Applies to type LOOP" + } + } + }, + "v2beta1GetRunRequestViewMode": { + "type": "string", + "enum": [ + "DEFAULT", + "FULL" + ], + "default": "DEFAULT", + "description": " - DEFAULT: By default `tasks` field is omitted.\nThis provides a faster and leaner run object.\n - FULL: This view mode displays all the tasks for this run\nwith all its fields populated." + }, + "v2beta1ListRunsRequestViewMode": { + "type": "string", + "enum": [ + "DEFAULT", + "FULL" + ], + "default": "DEFAULT", + "description": " - DEFAULT: By default `tasks` field is omitted.\nThis provides a faster and leaner run object.\n - FULL: This view mode displays all the tasks for this run\nwith all its fields populated." }, "v2beta1ListRunsResponse": { "type": "object", @@ -2264,20 +3230,52 @@ } } }, + "v2beta1ListTasksResponse": { + "type": "object", + "properties": { + "tasks": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/v2beta1PipelineTaskDetail" + } + }, + "next_page_token": { + "type": "string" + }, + "total_size": { + "type": "integer", + "format": "int32" + } + } + }, "v2beta1PipelineTaskDetail": { "type": "object", "properties": { - "run_id": { + "name": { + "type": "string" + }, + "display_name": { "type": "string", - "description": "ID of the parent run." + "description": "User specified name of a task that is defined in\n[Pipeline.spec][]." }, "task_id": { "type": "string", "description": "System-generated ID of a task." }, - "display_name": { + "run_id": { "type": "string", - "description": "User specified name of a task that is defined in\n[Pipeline.spec][]." + "description": "ID of the parent run." 
+ }, + "pods": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/PipelineTaskDetailTaskPod" + } + }, + "cache_fingerprint": { + "type": "string" }, "create_time": { "type": "string", @@ -2294,52 +3292,33 @@ "format": "date-time", "description": "Completion time of a task." }, - "executor_detail": { - "$ref": "#/definitions/v2beta1PipelineTaskExecutorDetail", - "description": "Execution information of a task." - }, "state": { - "$ref": "#/definitions/v2beta1RuntimeState", - "description": "Runtime state of a task." - }, - "execution_id": { - "type": "string", - "format": "int64", - "description": "Execution id of the corresponding entry in ML metadata store." - }, - "error": { - "$ref": "#/definitions/googlerpcStatus", - "description": "The error that occurred during task execution.\nOnly populated when the task is in FAILED or CANCELED state." - }, - "inputs": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/v2beta1ArtifactList" - }, - "description": "Input artifacts of the task." - }, - "outputs": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/v2beta1ArtifactList" - }, - "description": "Output artifacts of the task." + "$ref": "#/definitions/PipelineTaskDetailTaskState" }, - "parent_task_id": { - "type": "string", - "description": "ID of the parent task if the task is within a component scope.\nEmpty if the task is at the root level." + "status_metadata": { + "$ref": "#/definitions/PipelineTaskDetailStatusMetadata" }, "state_history": { "type": "array", "items": { "type": "object", - "$ref": "#/definitions/v2beta1RuntimeStatus" + "$ref": "#/definitions/PipelineTaskDetailTaskStatus" }, "description": "A sequence of task statuses. This field keeps a record\nof state transitions." }, - "pod_name": { + "type": { + "$ref": "#/definitions/PipelineTaskDetailTaskType" + }, + "type_attributes": { + "$ref": "#/definitions/PipelineTaskDetailTypeAttributes" + }, + "error": { + "$ref": "#/definitions/googlerpcStatus", + "description": "The error that occurred during task execution.\nOnly populated when the task is in FAILED or CANCELED state." + }, + "parent_task_id": { "type": "string", - "description": "Name of the corresponding pod assigned by the orchestration engine.\nAlso known as node_id." + "description": "ID of the parent task if the task is within a component scope.\nEmpty if the task is at the root level." }, "child_tasks": { "type": "array", @@ -2347,38 +3326,23 @@ "type": "object", "$ref": "#/definitions/PipelineTaskDetailChildTask" }, - "description": "Sequence of dependen tasks." - } - }, - "description": "Runtime information of a task execution." - }, - "v2beta1PipelineTaskExecutorDetail": { - "type": "object", - "properties": { - "main_job": { - "type": "string", - "description": "The name of the job for the main container execution." + "description": "Sequence of dependent tasks." }, - "pre_caching_check_job": { - "type": "string", - "description": "The name of the job for the pre-caching-check container\nexecution. This job will be available if the\nRun.pipeline_spec specifies the `pre_caching_check` hook in\nthe lifecycle events." + "inputs": { + "$ref": "#/definitions/PipelineTaskDetailInputOutputs" }, - "failed_main_jobs": { - "type": "array", - "items": { - "type": "string" - }, - "description": "The names of the previously failed job for the main container\nexecutions. The list includes the all attempts in chronological order." 
+ "outputs": { + "$ref": "#/definitions/PipelineTaskDetailInputOutputs" }, - "failed_pre_caching_check_jobs": { + "scope_path": { "type": "array", "items": { "type": "string" }, - "description": "The names of the previously failed job for the\npre-caching-check container executions. This job will be available if the\nRun.pipeline_spec specifies the `pre_caching_check` hook in\nthe lifecycle events.\nThe list includes the all attempts in chronological order." + "description": "The scope of this task within the\npipeline spec. Each entry represents\neither a Dag Task or a Container task.\nNote that Container task will are\nalways the last entry in a scope_path." } }, - "description": "Runtime information of a pipeline task executor." + "description": "Runtime information of a task execution." }, "v2beta1ReadArtifactResponse": { "type": "object", @@ -2458,7 +3422,7 @@ }, "run_details": { "$ref": "#/definitions/v2beta1RunDetails", - "description": "Output. Runtime details of a run." + "title": "Output. Runtime details of a run.\nEither remove or deprecate this" }, "recurring_run_id": { "type": "string", @@ -2471,6 +3435,22 @@ "$ref": "#/definitions/v2beta1RuntimeStatus" }, "description": "Output. A sequence of run statuses. This field keeps a record\nof state transitions." + }, + "pipeline_reference": { + "$ref": "#/definitions/v2beta1PipelineVersionReference", + "description": "Output only. Reference to the pipeline used for this run.", + "readOnly": true + }, + "task_count": { + "type": "integer", + "format": "int32" + }, + "tasks": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/v2beta1PipelineTaskDetail" + } } } }, @@ -2543,6 +3523,30 @@ }, "description": "Timestamped representation of a runtime state with an optional error." }, + "v2beta1UpdateTasksBulkRequest": { + "type": "object", + "properties": { + "tasks": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/v2beta1PipelineTaskDetail" + }, + "title": "Required. Map of task ID to task detail for bulk update.\nKey: task_id, Value: PipelineTaskDetail to update" + } + } + }, + "v2beta1UpdateTasksBulkResponse": { + "type": "object", + "properties": { + "tasks": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/v2beta1PipelineTaskDetail" + }, + "title": "Map of task ID to updated task detail.\nKey: task_id, Value: Updated PipelineTaskDetail" + } + } + }, "v2beta1Visualization": { "type": "object", "properties": { diff --git a/backend/api/v2beta1/swagger/run.swagger.json b/backend/api/v2beta1/swagger/run.swagger.json index e760b89f924..d3e56df99fe 100644 --- a/backend/api/v2beta1/swagger/run.swagger.json +++ b/backend/api/v2beta1/swagger/run.swagger.json @@ -81,6 +81,18 @@ "in": "query", "required": false, "type": "string" + }, + { + "name": "view", + "description": "Optional view mode. This field can be used to adjust\nhow detailed the Run object that is returned will be.\n\n - DEFAULT: By default `tasks` field is omitted.\nThis provides a faster and leaner run object.\n - FULL: This view mode displays all the tasks for this run\nwith all its fields populated.", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "DEFAULT", + "FULL" + ], + "default": "DEFAULT" } ], "tags": [ @@ -159,6 +171,18 @@ "in": "query", "required": false, "type": "string" + }, + { + "name": "view", + "description": "Optional view mode. 
This field can be used to adjust\nhow detailed the Run object that is returned will be.\n\n - DEFAULT: By default `tasks` field is omitted.\nThis provides a faster and leaner run object.\n - FULL: This view mode displays all the tasks for this run\nwith all its fields populated.", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "DEFAULT", + "FULL" + ], + "default": "DEFAULT" } ], "tags": [ @@ -416,9 +440,266 @@ "RunService" ] } + }, + "/apis/v2beta1/tasks": { + "get": { + "summary": "Lists tasks with optional filtering.", + "operationId": "list_tasks", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v2beta1ListTasksResponse" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/googlerpcStatus" + } + } + }, + "parameters": [ + { + "name": "parent_id", + "description": "List all tasks with this parent task.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "run_id", + "description": "List all tasks for this run.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "namespace", + "description": "List all tasks in this namespace.\nThe primary use case for this filter is to detect cache hits.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "page_size", + "in": "query", + "required": false, + "type": "integer", + "format": "int32" + }, + { + "name": "page_token", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "filter", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "order_by", + "in": "query", + "required": false, + "type": "string" + } + ], + "tags": [ + "RunService" + ] + }, + "post": { + "summary": "Creates a new task.", + "operationId": "create_task", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v2beta1PipelineTaskDetail" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/googlerpcStatus" + } + } + }, + "parameters": [ + { + "name": "task", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/v2beta1PipelineTaskDetail" + } + } + ], + "tags": [ + "RunService" + ] + } + }, + "/apis/v2beta1/tasks/{task_id}": { + "get": { + "summary": "Gets a specific task by ID.", + "operationId": "get_task", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v2beta1PipelineTaskDetail" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/googlerpcStatus" + } + } + }, + "parameters": [ + { + "name": "task_id", + "in": "path", + "required": true, + "type": "string" + } + ], + "tags": [ + "RunService" + ] + }, + "patch": { + "summary": "Updates an existing task.", + "operationId": "update_task", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v2beta1PipelineTaskDetail" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/googlerpcStatus" + } + } + }, + "parameters": [ + { + "name": "task_id", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "task", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/v2beta1PipelineTaskDetail" + } + } + ], + "tags": [ + "RunService" + ] + } + }, + 
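A minimal sketch of how a client could exercise the new list_tasks and get_task operations defined above. The server address, run ID, and auth handling are placeholders and assumptions, not part of this change; in-cluster deployments typically expose the REST API on the ml-pipeline service, and multi-user setups would additionally need auth headers:

import requests

# Assumed in-cluster address of the KFP API server; adjust for your deployment.
API = "http://ml-pipeline.kubeflow:8888"

# list_tasks: GET /apis/v2beta1/tasks with optional run_id/parent_id/namespace
# filters plus the usual page_size/page_token/filter/order_by parameters.
resp = requests.get(
    f"{API}/apis/v2beta1/tasks",
    params={"run_id": "my-run-id", "page_size": 50},  # placeholder run ID
)
resp.raise_for_status()
body = resp.json()
tasks = body.get("tasks", [])
print("total_size:", body.get("total_size"))

# get_task: GET /apis/v2beta1/tasks/{task_id} returns one v2beta1PipelineTaskDetail.
if tasks:
    task_id = tasks[0]["task_id"]
    detail = requests.get(f"{API}/apis/v2beta1/tasks/{task_id}").json()
    print(detail.get("display_name"), detail.get("state"))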
"/apis/v2beta1/tasks:batchUpdate": { + "post": { + "summary": "Updates multiple tasks in bulk.", + "operationId": "batch_update_tasks", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/v2beta1UpdateTasksBulkResponse" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/googlerpcStatus" + } + } + }, + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/v2beta1UpdateTasksBulkRequest" + } + } + ], + "tags": [ + "RunService" + ] + } } }, "definitions": { + "ArtifactArtifactType": { + "type": "string", + "enum": [ + "TYPE_UNSPECIFIED", + "Artifact", + "Model", + "Dataset", + "HTML", + "Markdown", + "Metric", + "ClassificationMetric", + "SlicedClassificationMetric" + ], + "default": "TYPE_UNSPECIFIED", + "description": " - TYPE_UNSPECIFIED: default; treated as \"not set\"\nreject if unset." + }, + "InputOutputsIOArtifact": { + "type": "object", + "properties": { + "artifacts": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/v2beta1Artifact" + } + }, + "type": { + "$ref": "#/definitions/v2beta1IOType" + }, + "artifact_key": { + "type": "string" + }, + "producer": { + "$ref": "#/definitions/v2beta1IOProducer" + } + }, + "title": "Align structure with Executor Input" + }, + "InputOutputsIOParameter": { + "type": "object", + "properties": { + "value": {}, + "type": { + "$ref": "#/definitions/v2beta1IOType" + }, + "parameter_key": { + "type": "string" + }, + "producer": { + "$ref": "#/definitions/v2beta1IOProducer", + "description": "This field is optional because in the case of\nInput RuntimeValues, ComponentDefaultInputs,\nand Raw Iterator Input there are no producers." + } + } + }, "PipelineTaskDetailChildTask": { "type": "object", "properties": { @@ -426,13 +707,130 @@ "type": "string", "description": "System-generated ID of a task." }, - "pod_name": { - "type": "string", - "description": "Name of the corresponding pod assigned by the orchestration engine.\nAlso known as node_id." + "name": { + "type": "string" } }, "description": "A dependent task that requires this one to succeed.\nRepresented by either task_id or pod_name." }, + "PipelineTaskDetailInputOutputs": { + "type": "object", + "properties": { + "parameters": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/InputOutputsIOParameter" + }, + "title": "For Loops parameters are filled with resolved\nparameterIterator.items" + }, + "artifacts": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/InputOutputsIOArtifact" + }, + "description": "Output Only. To create Artifacts for a task use\nArtifactTasks to link artifacts to tasks." + } + } + }, + "PipelineTaskDetailStatusMetadata": { + "type": "object", + "properties": { + "message": { + "type": "string", + "description": "KFP Backend will populate this field with error messages\nif any are available on a Failed task." + }, + "custom_properties": { + "type": "object", + "additionalProperties": {}, + "description": "Custom status metadata, this can be used to provide\nadditional status info for a given task during runtime\nThis is currently not utilized by KFP backend." 
+ } + } + }, + "PipelineTaskDetailTaskPod": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "uid": { + "type": "string" + }, + "type": { + "$ref": "#/definitions/PipelineTaskDetailTaskPodType" + } + } + }, + "PipelineTaskDetailTaskPodType": { + "type": "string", + "enum": [ + "UNSPECIFIED", + "DRIVER", + "EXECUTOR" + ], + "default": "UNSPECIFIED" + }, + "PipelineTaskDetailTaskState": { + "type": "string", + "enum": [ + "RUNTIME_STATE_UNSPECIFIED", + "RUNNING", + "SUCCEEDED", + "SKIPPED", + "FAILED", + "CACHED" + ], + "default": "RUNTIME_STATE_UNSPECIFIED", + "description": "- RUNTIME_STATE_UNSPECIFIED: Default value. This value is not used.\n - RUNNING: Entity execution is in progress.\n - SUCCEEDED: Entity completed successfully.\n - SKIPPED: Entity has been skipped. For example, due to caching.\n - FAILED: Entity execution has failed.", + "title": "Runtime state of a Task" + }, + "PipelineTaskDetailTaskStatus": { + "type": "object", + "properties": { + "update_time": { + "type": "string", + "format": "date-time" + }, + "state": { + "$ref": "#/definitions/PipelineTaskDetailTaskState" + }, + "error": { + "$ref": "#/definitions/googlerpcStatus" + } + }, + "description": "Timestamped representation of a Task state with an optional error." + }, + "PipelineTaskDetailTaskType": { + "type": "string", + "enum": [ + "ROOT", + "RUNTIME", + "CONDITION_BRANCH", + "CONDITION", + "LOOP", + "EXIT_HANDLER", + "IMPORTER", + "DAG" + ], + "default": "ROOT", + "description": " - ROOT: Root task is the top ancestor task to all tasks in the pipeline run\nIt is the only task with no parent task in a Pipeline Run.\n - RUNTIME: All child tasks in the Run DAG are Runtime tasks. With the exception\nof K8S driver pods.\nThese tasks are the only tasks that have Executor Pods.\n - CONDITION_BRANCH: Condition Branch is the wrapper task of an If block\n - CONDITION: Condition is an individual \"if\" branch, and is\na child to a CONDITION_BRANCH task.\n - LOOP: Task Group for CONDITION_BRANCH\nTask Group for RUNTIME Loop Iterations\n - DAG: Generic DAG task type for types like Nested Pipelines\nwhere there is no declarative way to detect this within\na driver." + }, + "PipelineTaskDetailTypeAttributes": { + "type": "object", + "properties": { + "iteration_index": { + "type": "string", + "format": "int64", + "title": "Optional. Applies to type Runtime that is an iteration" + }, + "iteration_count": { + "type": "string", + "format": "int64", + "title": "Optional. Applies to type LOOP" + } + } + }, "googlerpcStatus": { "type": "object", "properties": { @@ -475,19 +873,100 @@ "default": "NULL_VALUE", "description": "`NullValue` is a singleton enumeration to represent the null value for the\n`Value` type union.\n\nThe JSON representation for `NullValue` is JSON `null`.\n\n - NULL_VALUE: Null value." }, - "v2beta1ArtifactList": { + "v2beta1Artifact": { "type": "object", "properties": { - "artifact_ids": { - "type": "array", - "items": { - "type": "string", - "format": "int64" - }, - "description": "A list of artifact metadata ids." + "artifact_id": { + "type": "string", + "title": "Output only. The unique server generated id of the artifact.\nNote: Updated id name to be consistent with other api naming patterns (with prefix)", + "readOnly": true + }, + "name": { + "type": "string", + "description": "Required. 
The client provided name of the artifact.\nNote: in MLMD when name was set, it had to be unique for that type_id;\nthis restriction is removed here.\nIf this is a \"Metric\" artifact, the name of the metric\nis treated as the Key in its K/V pair." + }, + "description": { + "type": "string" + }, + "type": { + "$ref": "#/definitions/ArtifactArtifactType", + "title": "Required. The name of an ArtifactType. E.g. Dataset" + }, + "uri": { + "type": "string", + "description": "The uniform resource identifier of the physical artifact.\nMay be empty if there is no physical artifact." + }, + "metadata": { + "type": "object", + "additionalProperties": {}, + "description": "Optional. User provided custom properties which are not defined by its type." + }, + "number_value": { + "type": "number", + "format": "double", + "title": "Used primarily for metrics" + }, + "created_at": { + "type": "string", + "format": "date-time", + "description": "Output only. Create time of the artifact in milliseconds since epoch.\nNote: The type and name are updated from the mlmd artifact to be consistent with other backend apis.", + "readOnly": true + }, + "namespace": { + "type": "string" } }, - "description": "A list of artifact metadata." + "title": "Not to be confused with RuntimeArtifact in PipelineSpec" + }, + "v2beta1GetRunRequestViewMode": { + "type": "string", + "enum": [ + "DEFAULT", + "FULL" + ], + "default": "DEFAULT", + "description": " - DEFAULT: By default `tasks` field is omitted.\nThis provides a faster and leaner run object.\n - FULL: This view mode displays all the tasks for this run\nwith all its fields populated." + }, + "v2beta1IOProducer": { + "type": "object", + "properties": { + "task_name": { + "type": "string" + }, + "iteration": { + "type": "string", + "format": "int64", + "title": "When a source is from an iteration Runtime\ntask type inside a ParallelFor" + } + } + }, + "v2beta1IOType": { + "type": "string", + "enum": [ + "UNSPECIFIED", + "COMPONENT_DEFAULT_INPUT", + "TASK_OUTPUT_INPUT", + "COMPONENT_INPUT", + "RUNTIME_VALUE_INPUT", + "COLLECTED_INPUTS", + "ITERATOR_INPUT", + "ITERATOR_INPUT_RAW", + "ITERATOR_OUTPUT", + "OUTPUT", + "ONE_OF_OUTPUT", + "TASK_FINAL_STATUS_OUTPUT" + ], + "default": "UNSPECIFIED", + "description": "Describes the I/O relationship between\nArtifacts/Parameters and Tasks.\nThere are a couple of instances where\ninput/outputs have special types such\nas in the case of LoopArguments or\ndsl.Collected outputs.\n\n - UNSPECIFIED: For validation\n - COMPONENT_DEFAULT_INPUT: This is used for inputs that are\nprovided via default parameters in\nthe component input definitions\n - TASK_OUTPUT_INPUT: This is used for inputs that are\nprovided via upstream tasks.\nIn the sdk this appears as:\nTaskInputsSpec.kind.task_output_parameter\n\u0026 TaskInputsSpec.kind.task_output_artifact\n - COMPONENT_INPUT: Used for inputs that are\npassed from parent tasks.\n - RUNTIME_VALUE_INPUT: Hardcoded values passed\nas arguments to the task.\n - COLLECTED_INPUTS: Used for dsl.Collected\nUsage of this type indicates that all\nArtifacts within the IOArtifact.artifacts\nare inputs collected from sub tasks with\nITERATOR_OUTPUT outputs.\n - ITERATOR_INPUT: In a for loop task, introduced via ParallelFor, this type\nis used to indicate whether this resolved input belongs\nto a parameterIterator or artifactIterator.\nIn such a case the \"artifacts\" field for IOArtifact.artifacts\nis the list of resolved items for this parallelFor.\n - ITERATOR_INPUT_RAW: Hardcoded iterator parameters.\nRaw Iterator 
inputs have no producer\n - ITERATOR_OUTPUT: When an output is produced by a Runtime Iteration Task\nThis value is used to differentiate it from standard inputs\n - OUTPUT: All other output types fall under this type.\n - ONE_OF_OUTPUT: An output of a Condition branch." + }, + "v2beta1ListRunsRequestViewMode": { + "type": "string", + "enum": [ + "DEFAULT", + "FULL" + ], + "default": "DEFAULT", + "description": " - DEFAULT: By default `tasks` field is omitted.\nThis provides a faster and leaner run object.\n - FULL: This view mode displays all the tasks for this run\nwith all its fields populated." }, "v2beta1ListRunsResponse": { "type": "object", @@ -511,20 +990,52 @@ } } }, + "v2beta1ListTasksResponse": { + "type": "object", + "properties": { + "tasks": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/v2beta1PipelineTaskDetail" + } + }, + "next_page_token": { + "type": "string" + }, + "total_size": { + "type": "integer", + "format": "int32" + } + } + }, "v2beta1PipelineTaskDetail": { "type": "object", "properties": { - "run_id": { + "name": { + "type": "string" + }, + "display_name": { "type": "string", - "description": "ID of the parent run." + "description": "User specified name of a task that is defined in\n[Pipeline.spec][]." }, "task_id": { "type": "string", "description": "System-generated ID of a task." }, - "display_name": { + "run_id": { "type": "string", - "description": "User specified name of a task that is defined in\n[Pipeline.spec][]." + "description": "ID of the parent run." + }, + "pods": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/PipelineTaskDetailTaskPod" + } + }, + "cache_fingerprint": { + "type": "string" }, "create_time": { "type": "string", @@ -541,52 +1052,33 @@ "format": "date-time", "description": "Completion time of a task." }, - "executor_detail": { - "$ref": "#/definitions/v2beta1PipelineTaskExecutorDetail", - "description": "Execution information of a task." - }, "state": { - "$ref": "#/definitions/v2beta1RuntimeState", - "description": "Runtime state of a task." - }, - "execution_id": { - "type": "string", - "format": "int64", - "description": "Execution id of the corresponding entry in ML metadata store." - }, - "error": { - "$ref": "#/definitions/googlerpcStatus", - "description": "The error that occurred during task execution.\nOnly populated when the task is in FAILED or CANCELED state." + "$ref": "#/definitions/PipelineTaskDetailTaskState" }, - "inputs": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/v2beta1ArtifactList" - }, - "description": "Input artifacts of the task." - }, - "outputs": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/v2beta1ArtifactList" - }, - "description": "Output artifacts of the task." - }, - "parent_task_id": { - "type": "string", - "description": "ID of the parent task if the task is within a component scope.\nEmpty if the task is at the root level." + "status_metadata": { + "$ref": "#/definitions/PipelineTaskDetailStatusMetadata" }, "state_history": { "type": "array", "items": { "type": "object", - "$ref": "#/definitions/v2beta1RuntimeStatus" + "$ref": "#/definitions/PipelineTaskDetailTaskStatus" }, "description": "A sequence of task statuses. This field keeps a record\nof state transitions." 
}, - "pod_name": { + "type": { + "$ref": "#/definitions/PipelineTaskDetailTaskType" + }, + "type_attributes": { + "$ref": "#/definitions/PipelineTaskDetailTypeAttributes" + }, + "error": { + "$ref": "#/definitions/googlerpcStatus", + "description": "The error that occurred during task execution.\nOnly populated when the task is in FAILED or CANCELED state." + }, + "parent_task_id": { "type": "string", - "description": "Name of the corresponding pod assigned by the orchestration engine.\nAlso known as node_id." + "description": "ID of the parent task if the task is within a component scope.\nEmpty if the task is at the root level." }, "child_tasks": { "type": "array", @@ -594,38 +1086,23 @@ "type": "object", "$ref": "#/definitions/PipelineTaskDetailChildTask" }, - "description": "Sequence of dependen tasks." - } - }, - "description": "Runtime information of a task execution." - }, - "v2beta1PipelineTaskExecutorDetail": { - "type": "object", - "properties": { - "main_job": { - "type": "string", - "description": "The name of the job for the main container execution." + "description": "Sequence of dependent tasks." }, - "pre_caching_check_job": { - "type": "string", - "description": "The name of the job for the pre-caching-check container\nexecution. This job will be available if the\nRun.pipeline_spec specifies the `pre_caching_check` hook in\nthe lifecycle events." + "inputs": { + "$ref": "#/definitions/PipelineTaskDetailInputOutputs" }, - "failed_main_jobs": { - "type": "array", - "items": { - "type": "string" - }, - "description": "The names of the previously failed job for the main container\nexecutions. The list includes the all attempts in chronological order." + "outputs": { + "$ref": "#/definitions/PipelineTaskDetailInputOutputs" }, - "failed_pre_caching_check_jobs": { + "scope_path": { "type": "array", "items": { "type": "string" }, - "description": "The names of the previously failed job for the\npre-caching-check container executions. This job will be available if the\nRun.pipeline_spec specifies the `pre_caching_check` hook in\nthe lifecycle events.\nThe list includes the all attempts in chronological order." + "description": "The scope of this task within the\npipeline spec. Each entry represents\neither a Dag Task or a Container task.\nNote that a Container task is\nalways the last entry in a scope_path." } }, - "description": "Runtime information of a pipeline task executor." + "description": "Runtime information of a task execution." }, "v2beta1PipelineVersionReference": { "type": "object", @@ -719,7 +1196,7 @@ }, "run_details": { "$ref": "#/definitions/v2beta1RunDetails", - "description": "Output. Runtime details of a run." + "title": "Output. Runtime details of a run.\nEither remove or deprecate this" }, "recurring_run_id": { "type": "string", @@ -732,6 +1209,22 @@ "$ref": "#/definitions/v2beta1RuntimeStatus" }, "description": "Output. A sequence of run statuses. This field keeps a record\nof state transitions." + }, + "pipeline_reference": { + "$ref": "#/definitions/v2beta1PipelineVersionReference", + "description": "Output only. Reference to the pipeline used for this run.", + "readOnly": true + }, + "task_count": { + "type": "integer", + "format": "int32" + }, + "tasks": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/v2beta1PipelineTaskDetail" + } } } }, @@ -818,6 +1311,30 @@ } }, "description": "Timestamped representation of a runtime state with an optional error." 
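The view parameter added to the run operations earlier in this file pairs with the reshaped v2beta1PipelineTaskDetail: with view=FULL the run embeds its tasks, so state, state_history, and scope_path can be read without a separate list_tasks call. A rough sketch, assuming the standard GET /apis/v2beta1/runs/{run_id} endpoint and placeholder IDs:

import requests

API = "http://ml-pipeline.kubeflow:8888"  # assumed address, as in the sketch above
RUN_ID = "my-run-id"                      # placeholder

# view=FULL asks the server to populate the run's `tasks` field;
# DEFAULT omits it for a faster, leaner response.
run = requests.get(f"{API}/apis/v2beta1/runs/{RUN_ID}", params={"view": "FULL"}).json()

for task in run.get("tasks", []):
    # scope_path locates the task within the pipeline spec; a container
    # task is always the last entry.
    print(
        task.get("task_id"),
        task.get("type"),
        task.get("state"),
        "/".join(task.get("scope_path", [])),
    )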
+ }, + "v2beta1UpdateTasksBulkRequest": { + "type": "object", + "properties": { + "tasks": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/v2beta1PipelineTaskDetail" + }, + "title": "Required. Map of task ID to task detail for bulk update.\nKey: task_id, Value: PipelineTaskDetail to update" + } + } + }, + "v2beta1UpdateTasksBulkResponse": { + "type": "object", + "properties": { + "tasks": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/v2beta1PipelineTaskDetail" + }, + "title": "Map of task ID to updated task detail.\nKey: task_id, Value: Updated PipelineTaskDetail" + } + } } }, "securityDefinitions": { diff --git a/backend/metadata_writer/Dockerfile b/backend/metadata_writer/Dockerfile deleted file mode 100644 index 046d8ae0b92..00000000000 --- a/backend/metadata_writer/Dockerfile +++ /dev/null @@ -1,7 +0,0 @@ -# ml-metadata package depends on tensorflow package -FROM python:3.11 -COPY backend/metadata_writer/requirements.txt /kfp/metadata_writer/ -RUN python3 -m pip install -r /kfp/metadata_writer/requirements.txt - -COPY backend/metadata_writer/src/* /kfp/metadata_writer/ -CMD python3 -u /kfp/metadata_writer/metadata_writer.py diff --git a/backend/metadata_writer/README.md b/backend/metadata_writer/README.md deleted file mode 100644 index eb8166edd69..00000000000 --- a/backend/metadata_writer/README.md +++ /dev/null @@ -1,8 +0,0 @@ -# metadata\_writer - -## Updating python dependencies - -[pip-tools](https://github.com/jazzband/pip-tools) is used to manage python -dependencies. To update dependencies, edit [requirements.in](requirements.in) -and run `./update_requirements.sh` to update and pin the transitive -dependencies. diff --git a/backend/metadata_writer/requirements.in b/backend/metadata_writer/requirements.in deleted file mode 100644 index 6374cd18433..00000000000 --- a/backend/metadata_writer/requirements.in +++ /dev/null @@ -1,4 +0,0 @@ -kubernetes>=8.0.0,<=31.0.0 -ml-metadata==1.17.0 -lru-dict>=1.1.7,<2.0.0 -PyYAML>=5.3,<7 diff --git a/backend/metadata_writer/requirements.txt b/backend/metadata_writer/requirements.txt deleted file mode 100644 index cf63e9909b7..00000000000 --- a/backend/metadata_writer/requirements.txt +++ /dev/null @@ -1,69 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.9 -# by the following command: -# -# pip-compile --output-file=- --resolver=backtracking - -# -absl-py==1.4.0 - # via ml-metadata -attrs==23.2.0 - # via ml-metadata -cachetools==5.5.2 - # via google-auth -certifi==2025.8.3 - # via - # kubernetes - # requests -charset-normalizer==3.4.3 - # via requests -durationpy==0.10 - # via kubernetes -google-auth==2.40.3 - # via kubernetes -grpcio==1.74.0 - # via ml-metadata -idna==3.10 - # via requests -kubernetes==31.0.0 - # via -r - -lru-dict==1.3.0 - # via -r - -ml-metadata==1.17.0 - # via -r - -oauthlib==3.3.1 - # via - # kubernetes - # requests-oauthlib -protobuf==4.25.8 - # via ml-metadata -pyasn1==0.6.1 - # via - # pyasn1-modules - # rsa -pyasn1-modules==0.4.2 - # via google-auth -python-dateutil==2.9.0.post0 - # via kubernetes -pyyaml==6.0.2 - # via - # -r - - # kubernetes -requests==2.32.5 - # via - # kubernetes - # requests-oauthlib -requests-oauthlib==2.0.0 - # via kubernetes -rsa==4.9.1 - # via google-auth -six==1.17.0 - # via - # kubernetes - # ml-metadata - # python-dateutil -urllib3==2.5.0 - # via - # kubernetes - # requests -websocket-client==1.8.0 - # via kubernetes diff --git a/backend/metadata_writer/src/metadata_helpers.py 
b/backend/metadata_writer/src/metadata_helpers.py deleted file mode 100644 index b47d10163a9..00000000000 --- a/backend/metadata_writer/src/metadata_helpers.py +++ /dev/null @@ -1,434 +0,0 @@ -# Copyright 2019 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import functools -import json -import os -import sys -from time import sleep -from ml_metadata.proto import metadata_store_pb2 -from ml_metadata.metadata_store import metadata_store -from ipaddress import ip_address, IPv4Address - -def value_to_mlmd_value(value) -> metadata_store_pb2.Value: - if value is None: - return metadata_store_pb2.Value() - if isinstance(value, int): - return metadata_store_pb2.Value(int_value=value) - if isinstance(value, float): - return metadata_store_pb2.Value(double_value=value) - return metadata_store_pb2.Value(string_value=str(value)) - - -def connect_to_mlmd() -> metadata_store.MetadataStore: - metadata_service_host = os.environ.get("METADATA_GRPC_SERVICE_SERVICE_HOST", "metadata-grpc-service.kubeflow") - metadata_service_port = int(os.environ.get("METADATA_GRPC_SERVICE_SERVICE_PORT", 8080)) - metadata_service_host = "metadata-grpc-service.kubeflow" - metadata_service_port = 8080 - - mlmd_connection_config = metadata_store_pb2.MetadataStoreClientConfig( - host="[{}]".format(metadata_service_host) if isIPv6(metadata_service_host) else metadata_service_host, - port=metadata_service_port, - ) - - tls_enabled = os.environ.get("METADATA_TLS_ENABLED", "false").lower() in ("1", "true", "yes") - - if tls_enabled: - ca_pem = None - ca_cert_path = os.environ.get("CA_CERT_PATH") - - if ca_cert_path and os.path.exists(ca_cert_path): - with open(ca_cert_path, "r", encoding="utf-8") as f: - ca_pem = f.read() - - ssl_cfg = metadata_store_pb2.MetadataStoreClientConfig.SSLConfig(custom_ca=ca_pem) - mlmd_connection_config.ssl_config.CopyFrom(ssl_cfg) - - # Checking the connection to the Metadata store. - for _ in range(100): - try: - mlmd_store = metadata_store.MetadataStore(mlmd_connection_config) - _ = mlmd_store.get_context_types() - return mlmd_store - except Exception as e: - print('Failed to access the Metadata store. 
Exception: "{}"'.format(str(e)), file=sys.stderr) - sys.stderr.flush() - sleep(1) - - raise RuntimeError('Could not connect to the Metadata store.') - - -def get_or_create_artifact_type(store, type_name, properties: dict = None) -> metadata_store_pb2.ArtifactType: - try: - artifact_type = store.get_artifact_type(type_name=type_name) - return artifact_type - except: - artifact_type = metadata_store_pb2.ArtifactType( - name=type_name, - properties=properties, - ) - artifact_type.id = store.put_artifact_type(artifact_type) # Returns ID - return artifact_type - - -def get_or_create_execution_type(store, type_name, properties: dict = None) -> metadata_store_pb2.ExecutionType: - try: - execution_type = store.get_execution_type(type_name=type_name) - return execution_type - except: - execution_type = metadata_store_pb2.ExecutionType( - name=type_name, - properties=properties, - ) - execution_type.id = store.put_execution_type(execution_type) # Returns ID - return execution_type - - -def get_or_create_context_type(store, type_name, properties: dict = None) -> metadata_store_pb2.ContextType: - try: - context_type = store.get_context_type(type_name=type_name) - return context_type - except: - context_type = metadata_store_pb2.ContextType( - name=type_name, - properties=properties, - ) - context_type.id = store.put_context_type(context_type) # Returns ID - return context_type - - -def create_artifact_with_type( - store, - uri: str, - type_name: str, - properties: dict = None, - type_properties: dict = None, - custom_properties: dict = None, -) -> metadata_store_pb2.Artifact: - artifact_type = get_or_create_artifact_type( - store=store, - type_name=type_name, - properties=type_properties, - ) - artifact = metadata_store_pb2.Artifact( - uri=uri, - type_id=artifact_type.id, - properties=properties, - custom_properties=custom_properties, - ) - artifact.id = store.put_artifacts([artifact])[0] - return artifact - - -def create_execution_with_type( - store, - type_name: str, - properties: dict = None, - type_properties: dict = None, - custom_properties: dict = None, -) -> metadata_store_pb2.Execution: - execution_type = get_or_create_execution_type( - store=store, - type_name=type_name, - properties=type_properties, - ) - execution = metadata_store_pb2.Execution( - type_id=execution_type.id, - properties=properties, - custom_properties=custom_properties, - ) - execution.id = store.put_executions([execution])[0] - return execution - - -def create_context_with_type( - store, - context_name: str, - type_name: str, - properties: dict = None, - type_properties: dict = None, - custom_properties: dict = None, -) -> metadata_store_pb2.Context: - # ! 
Context_name must be unique - context_type = get_or_create_context_type( - store=store, - type_name=type_name, - properties=type_properties, - ) - context = metadata_store_pb2.Context( - name=context_name, - type_id=context_type.id, - properties=properties, - custom_properties=custom_properties, - ) - context.id = store.put_contexts([context])[0] - return context - - -@functools.lru_cache(maxsize=128) -def get_context_by_name( - store, - context_name: str, -) -> metadata_store_pb2.Context: - matching_contexts = [context for context in store.get_contexts() if context.name == context_name] - assert len(matching_contexts) <= 1 - if len(matching_contexts) == 0: - raise ValueError('Context with name "{}" was not found'.format(context_name)) - return matching_contexts[0] - - -def get_or_create_context_with_type( - store, - context_name: str, - type_name: str, - properties: dict = None, - type_properties: dict = None, - custom_properties: dict = None, -) -> metadata_store_pb2.Context: - try: - context = get_context_by_name(store, context_name) - except: - context = create_context_with_type( - store=store, - context_name=context_name, - type_name=type_name, - properties=properties, - type_properties=type_properties, - custom_properties=custom_properties, - ) - return context - - # Verifying that the context has the expected type name - context_types = store.get_context_types_by_id([context.type_id]) - assert len(context_types) == 1 - if context_types[0].name != type_name: - raise RuntimeError('Context "{}" was found, but it has type "{}" instead of "{}"'.format(context_name, context_types[0].name, type_name)) - return context - - -def create_new_execution_in_existing_context( - store, - execution_type_name: str, - context_id: int, - properties: dict = None, - execution_type_properties: dict = None, - custom_properties: dict = None, -) -> metadata_store_pb2.Execution: - execution = create_execution_with_type( - store=store, - properties=properties, - custom_properties=custom_properties, - type_name=execution_type_name, - type_properties=execution_type_properties, - ) - association = metadata_store_pb2.Association( - execution_id=execution.id, - context_id=context_id, - ) - - store.put_attributions_and_associations([], [association]) - return execution - - -RUN_CONTEXT_TYPE_NAME = "KfpRun" -KFP_EXECUTION_TYPE_NAME_PREFIX = 'components.' 
- -ARTIFACT_IO_NAME_PROPERTY_NAME = "name" -EXECUTION_COMPONENT_ID_PROPERTY_NAME = "component_id"# ~= Task ID - -#TODO: Get rid of these when https://github.com/tensorflow/tfx/issues/905 and https://github.com/kubeflow/pipelines/issues/2562 are fixed -ARTIFACT_PIPELINE_NAME_PROPERTY_NAME = "pipeline_name" -EXECUTION_PIPELINE_NAME_PROPERTY_NAME = "pipeline_name" -CONTEXT_PIPELINE_NAME_PROPERTY_NAME = "pipeline_name" -ARTIFACT_RUN_ID_PROPERTY_NAME = "run_id" -EXECUTION_RUN_ID_PROPERTY_NAME = "run_id" -CONTEXT_RUN_ID_PROPERTY_NAME = "run_id" - -KFP_POD_NAME_EXECUTION_PROPERTY_NAME = 'kfp_pod_name' - -ARTIFACT_ARGO_ARTIFACT_PROPERTY_NAME = 'argo_artifact' - - -def get_or_create_run_context( - store, - run_id: str, -) -> metadata_store_pb2.Context: - context = get_or_create_context_with_type( - store=store, - context_name=run_id, - type_name=RUN_CONTEXT_TYPE_NAME, - type_properties={ - CONTEXT_PIPELINE_NAME_PROPERTY_NAME: metadata_store_pb2.STRING, - CONTEXT_RUN_ID_PROPERTY_NAME: metadata_store_pb2.STRING, - }, - properties={ - CONTEXT_PIPELINE_NAME_PROPERTY_NAME: metadata_store_pb2.Value(string_value=run_id), - CONTEXT_RUN_ID_PROPERTY_NAME: metadata_store_pb2.Value(string_value=run_id), - }, - ) - return context - - -def create_new_execution_in_existing_run_context( - store, - execution_type_name: str, - context_id: int, - pod_name: str, - # TODO: Remove when UX stops relying on thsese properties - pipeline_name: str = None, - run_id: str = None, - instance_id: str = None, - custom_properties = None, -) -> metadata_store_pb2.Execution: - pipeline_name = pipeline_name or 'Context_' + str(context_id) + '_pipeline' - run_id = run_id or 'Context_' + str(context_id) + '_run' - instance_id = instance_id or execution_type_name - mlmd_custom_properties = {} - for property_name, property_value in (custom_properties or {}).items(): - mlmd_custom_properties[property_name] = value_to_mlmd_value(property_value) - mlmd_custom_properties[KFP_POD_NAME_EXECUTION_PROPERTY_NAME] = metadata_store_pb2.Value(string_value=pod_name) - return create_new_execution_in_existing_context( - store=store, - execution_type_name=execution_type_name, - context_id=context_id, - execution_type_properties={ - EXECUTION_PIPELINE_NAME_PROPERTY_NAME: metadata_store_pb2.STRING, - EXECUTION_RUN_ID_PROPERTY_NAME: metadata_store_pb2.STRING, - EXECUTION_COMPONENT_ID_PROPERTY_NAME: metadata_store_pb2.STRING, - }, - # TODO: Remove when UX stops relying on thsese properties - properties={ - EXECUTION_PIPELINE_NAME_PROPERTY_NAME: metadata_store_pb2.Value(string_value=pipeline_name), # Mistakenly used for grouping in the UX - EXECUTION_RUN_ID_PROPERTY_NAME: metadata_store_pb2.Value(string_value=run_id), - EXECUTION_COMPONENT_ID_PROPERTY_NAME: metadata_store_pb2.Value(string_value=instance_id), # should set to task ID, not component ID - }, - custom_properties=mlmd_custom_properties, - ) - - -def create_new_artifact_event_and_attribution( - store, - execution_id: int, - context_id: int, - uri: str, - type_name: str, - event_type: metadata_store_pb2.Event.Type, - properties: dict = None, - artifact_type_properties: dict = None, - custom_properties: dict = None, - artifact_name_path: metadata_store_pb2.Event.Path = None, - milliseconds_since_epoch: int = None, -) -> metadata_store_pb2.Artifact: - artifact = create_artifact_with_type( - store=store, - uri=uri, - type_name=type_name, - type_properties=artifact_type_properties, - properties=properties, - custom_properties=custom_properties, - ) - event = metadata_store_pb2.Event( - 
execution_id=execution_id, - artifact_id=artifact.id, - type=event_type, - path=artifact_name_path, - milliseconds_since_epoch=milliseconds_since_epoch, - ) - store.put_events([event]) - - attribution = metadata_store_pb2.Attribution( - context_id=context_id, - artifact_id=artifact.id, - ) - store.put_attributions_and_associations([attribution], []) - - return artifact - - -def link_execution_to_input_artifact( - store, - execution_id: int, - uri: str, - input_name: str, -) -> metadata_store_pb2.Artifact: - artifacts = store.get_artifacts_by_uri(uri) - if len(artifacts) == 0: - print('Error: Not found upstream artifact with URI={}.'.format(uri), file=sys.stderr) - return None - if len(artifacts) > 1: - print('Error: Found multiple artifacts with the same URI. {} Using the last one..'.format(artifacts), file=sys.stderr) - - artifact = artifacts[-1] - - event = metadata_store_pb2.Event( - execution_id=execution_id, - artifact_id=artifact.id, - type=metadata_store_pb2.Event.INPUT, - path=metadata_store_pb2.Event.Path( - steps=[ - metadata_store_pb2.Event.Path.Step( - key=input_name, - ), - ] - ), - ) - store.put_events([event]) - return artifact - - -def create_new_output_artifact( - store, - execution_id: int, - context_id: int, - uri: str, - type_name: str, - output_name: str, - run_id: str = None, - argo_artifact: dict = None, -) -> metadata_store_pb2.Artifact: - custom_properties = { - ARTIFACT_IO_NAME_PROPERTY_NAME: metadata_store_pb2.Value(string_value=output_name), - } - if run_id: - custom_properties[ARTIFACT_PIPELINE_NAME_PROPERTY_NAME] = metadata_store_pb2.Value(string_value=str(run_id)) - custom_properties[ARTIFACT_RUN_ID_PROPERTY_NAME] = metadata_store_pb2.Value(string_value=str(run_id)) - if argo_artifact: - custom_properties[ARTIFACT_ARGO_ARTIFACT_PROPERTY_NAME] = metadata_store_pb2.Value(string_value=json.dumps(argo_artifact, sort_keys=True)) - return create_new_artifact_event_and_attribution( - store=store, - execution_id=execution_id, - context_id=context_id, - uri=uri, - type_name=type_name, - event_type=metadata_store_pb2.Event.OUTPUT, - artifact_name_path=metadata_store_pb2.Event.Path( - steps=[ - metadata_store_pb2.Event.Path.Step( - key=output_name, - #index=0, - ), - ] - ), - custom_properties=custom_properties, - #milliseconds_since_epoch=int(datetime.now(timezone.utc).timestamp() * 1000), # Happens automatically - ) - -def isIPv6(ip: str) -> bool: - try: - return False if type(ip_address(ip)) is IPv4Address else True - except Exception as e: - print('Error: Exception:{}'.format(str(e)), file=sys.stderr) - sys.stderr.flush() - - diff --git a/backend/metadata_writer/src/metadata_writer.py b/backend/metadata_writer/src/metadata_writer.py deleted file mode 100644 index e305bb1e15b..00000000000 --- a/backend/metadata_writer/src/metadata_writer.py +++ /dev/null @@ -1,406 +0,0 @@ -# Copyright 2019 The Kubeflow Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import json -import hashlib -import os -import re -import collections -import kubernetes -import yaml -import urllib3 -from time import sleep -import lru - -from metadata_helpers import * - - -namespace_to_watch = os.environ.get('NAMESPACE_TO_WATCH', 'default') -pod_name_to_execution_id_size = os.environ.get('POD_NAME_TO_EXECUTION_ID_SIZE', 5000) -workflow_name_to_context_id_size = os.environ.get('WORKFLOW_NAME_TO_CONTEXT_ID_SIZE', 5000) -pods_with_written_metadata_size = os.environ.get('PODS_WITH_WRITTEN_METADATA_SIZE', 5000) -debug_files_size = os.environ.get('DEBUG_FILES_SIZE', 5000) -# See the documentation on settings k8s_watch timeouts: -# https://github.com/kubernetes-client/python/blob/master/examples/watch/timeout-settings.md -k8s_watch_server_side_timeout = os.environ.get('K8S_WATCH_SERVER_SIDE_TIMEOUT', 1800) -k8s_watch_client_side_timeout = os.environ.get('K8S_WATCH_CLIENT_SIDE_TIMEOUT', 60) - -kubernetes.config.load_incluster_config() -k8s_api = kubernetes.client.CoreV1Api() -k8s_watch = kubernetes.watch.Watch() - - -patch_retries = 20 -sleep_time = 0.1 - - -def patch_pod_metadata( - namespace: str, - pod_name: str, - patch: dict, - k8s_api: kubernetes.client.CoreV1Api = None, -): - k8s_api = k8s_api or kubernetes.client.CoreV1Api() - patch = { - 'metadata': patch - } - for retry in range(patch_retries): - try: - pod = k8s_api.patch_namespaced_pod( - name=pod_name, - namespace=namespace, - body=patch, - ) - return pod - except Exception as e: - print(e) - sleep(sleep_time) - - -#Connecting to MetadataDB -mlmd_store = connect_to_mlmd() -print("Connected to the metadata store") - - -ARGO_OUTPUTS_ANNOTATION_KEY = 'workflows.argoproj.io/outputs' -ARGO_TEMPLATE_ENV_KEY = 'ARGO_TEMPLATE' -KFP_COMPONENT_SPEC_ANNOTATION_KEY = 'pipelines.kubeflow.org/component_spec' -KFP_PARAMETER_ARGUMENTS_ANNOTATION_KEY = 'pipelines.kubeflow.org/arguments.parameters' -METADATA_EXECUTION_ID_LABEL_KEY = 'pipelines.kubeflow.org/metadata_execution_id' -METADATA_CONTEXT_ID_LABEL_KEY = 'pipelines.kubeflow.org/metadata_context_id' -KFP_SDK_TYPE_LABEL_KEY = 'pipelines.kubeflow.org/pipeline-sdk-type' -TFX_SDK_TYPE_VALUE = 'tfx' -METADATA_ARTIFACT_IDS_ANNOTATION_KEY = 'pipelines.kubeflow.org/metadata_artifact_ids' -METADATA_INPUT_ARTIFACT_IDS_ANNOTATION_KEY = 'pipelines.kubeflow.org/metadata_input_artifact_ids' -METADATA_OUTPUT_ARTIFACT_IDS_ANNOTATION_KEY = 'pipelines.kubeflow.org/metadata_output_artifact_ids' -KFP_V2_COMPONENT_ANNOTATION_KEY = 'pipelines.kubeflow.org/v2_component' -KFP_V2_COMPONENT_ANNOTATION_VALUE = 'true' - -ARGO_WORKFLOW_LABEL_KEY = 'workflows.argoproj.io/workflow' -ARGO_COMPLETED_LABEL_KEY = 'workflows.argoproj.io/completed' -METADATA_WRITTEN_LABEL_KEY = 'pipelines.kubeflow.org/metadata_written' - - -def output_name_to_argo(name: str) -> str: - import re - # This sanitization code should be kept in sync with the code in the DSL compiler. - # See https://github.com/kubeflow/pipelines/blob/39975e3cde7ba4dcea2bca835b92d0fe40b1ae3c/sdk/python/kfp/compiler/_k8s_helper.py#L33 - return re.sub('-+', '-', re.sub('[^-_0-9A-Za-z]+', '-', name)).strip('-') - -def is_s3_endpoint(endpoint: str) -> bool: - return re.search('^.*s3.*amazonaws.com.*$', endpoint) - -def get_object_store_provider(endpoint: str) -> bool: - if is_s3_endpoint(endpoint): - return 's3' - else: - return 'minio' - -def argo_artifact_to_uri(artifact: dict) -> str: - # s3 here means s3 compatible object storage. not AWS S3. 
- if 's3' in artifact: - s3_artifact = artifact['s3'] - return '{provider}://{bucket}/{key}'.format( - provider=get_object_store_provider(s3_artifact.get('endpoint', '')), - bucket=s3_artifact.get('bucket', ''), - key=s3_artifact.get('key', ''), - ) - elif 'raw' in artifact: - return None - else: - return None - - -def is_tfx_pod(pod) -> bool: - # The label defaults to 'tfx', but is overridable. - # Official tfx templates override the value to 'tfx-template', so - # we loosely match the word 'tfx'. - if TFX_SDK_TYPE_VALUE in pod.metadata.labels.get(KFP_SDK_TYPE_LABEL_KEY, ''): - return True - main_containers = [container for container in pod.spec.containers if container.name == 'main'] - if len(main_containers) != 1: - return False - main_container = main_containers[0] - return main_container.command and main_container.command[-1].endswith('tfx/orchestration/kubeflow/container_entrypoint.py') - -def is_kfp_v2_pod(pod) -> bool: - return pod.metadata.annotations.get(KFP_V2_COMPONENT_ANNOTATION_KEY) == KFP_V2_COMPONENT_ANNOTATION_VALUE - -# Caches (not expected to be persistent) -# These caches are only used to prevent race conditions. Race conditions happen because the writer can see multiple versions of K8s object before the applied labels show up. -# They are expected to be lost when restarting the service. -# The operation of the Metadata Writer remains correct even if it's getting restarted frequently. (Kubernetes only sends the latest version of resource for new watchers.) -# Technically, we could remove the objects from cache as soon as we see that our labels have been applied successfully. -pod_name_to_execution_id = lru.LRU(pod_name_to_execution_id_size) -workflow_name_to_context_id = lru.LRU(workflow_name_to_context_id_size) -pods_with_written_metadata = lru.LRU(pods_with_written_metadata_size) -debug_paths = collections.deque() - -while True: - print("Start watching Kubernetes Pods created by Argo") - if namespace_to_watch: - pod_stream = k8s_watch.stream( - k8s_api.list_namespaced_pod, - namespace=namespace_to_watch, - label_selector=ARGO_WORKFLOW_LABEL_KEY, - timeout_seconds=k8s_watch_server_side_timeout, - _request_timeout=k8s_watch_client_side_timeout, - ) - else: - pod_stream = k8s_watch.stream( - k8s_api.list_pod_for_all_namespaces, - label_selector=ARGO_WORKFLOW_LABEL_KEY, - timeout_seconds=k8s_watch_server_side_timeout, - _request_timeout=k8s_watch_client_side_timeout, - ) - try: - for event in pod_stream: - obj = event['object'] - print('Kubernetes Pod event: ', event['type'], obj.metadata.name, obj.metadata.resource_version) - if event['type'] == 'ERROR': - print(event) - - pod_name = obj.metadata.name - - # Logging pod changes for debugging - debug_path = '/tmp/pod_' + obj.metadata.name + '_' + obj.metadata.resource_version - with open(debug_path, 'w') as f: - f.write(yaml.dump(obj.to_dict())) - debug_paths.append(debug_path) - - # Do some housekeeping, ensure we only keep a fixed size buffer of debug files so we don't - # grow the disk size indefinitely for long running pods. 
- if len(debug_paths) > debug_files_size: - os.remove(debug_paths.popleft()) - - assert obj.kind == 'Pod' - - if METADATA_WRITTEN_LABEL_KEY in obj.metadata.labels: - continue - - # Skip TFX pods - they have their own metadata writers - if is_tfx_pod(obj): - continue - - # Skip KFP v2 pods - they have their own metadat writers - if is_kfp_v2_pod(obj): - continue - - argo_workflow_name = obj.metadata.labels[ARGO_WORKFLOW_LABEL_KEY] # Should exist due to initial filtering - argo_template = {} - for env in obj.spec.containers[0].env: - if env.name == ARGO_TEMPLATE_ENV_KEY: - argo_template = json.loads(env.value) - break - - # Should we throw error instead if argo template not found? - argo_template_name = argo_template.get('name', '') - - component_name = argo_template_name - component_version = component_name - argo_output_name_to_type = {} - if KFP_COMPONENT_SPEC_ANNOTATION_KEY in obj.metadata.annotations: - component_spec_text = obj.metadata.annotations[KFP_COMPONENT_SPEC_ANNOTATION_KEY] - component_spec = json.loads(component_spec_text) - component_spec_digest = hashlib.sha256(component_spec_text.encode()).hexdigest() - component_name = component_spec.get('name', component_name) - component_version = component_name + '@sha256=' + component_spec_digest - output_name_to_type = {output['name']: output.get('type', None) for output in component_spec.get('outputs', [])} - argo_output_name_to_type = {output_name_to_argo(k): v for k, v in output_name_to_type.items() if v} - - if obj.metadata.name in pod_name_to_execution_id: - execution_id = pod_name_to_execution_id[obj.metadata.name] - context_id = workflow_name_to_context_id[argo_workflow_name] - elif METADATA_EXECUTION_ID_LABEL_KEY in obj.metadata.labels: - execution_id = int(obj.metadata.labels[METADATA_EXECUTION_ID_LABEL_KEY]) - context_id = int(obj.metadata.labels[METADATA_CONTEXT_ID_LABEL_KEY]) - print('Found execution id: {}, context id: {} for pod {}.'.format(execution_id, context_id, obj.metadata.name)) - else: - run_context = get_or_create_run_context( - store=mlmd_store, - run_id=argo_workflow_name, # We can switch to internal run IDs once backend starts adding them - ) - - # Saving input paramater arguments - execution_custom_properties = {} - if KFP_PARAMETER_ARGUMENTS_ANNOTATION_KEY in obj.metadata.annotations: - parameter_arguments_json = obj.metadata.annotations[KFP_PARAMETER_ARGUMENTS_ANNOTATION_KEY] - try: - parameter_arguments = json.loads(parameter_arguments_json) - for paramater_name, parameter_value in parameter_arguments.items(): - execution_custom_properties['input:' + paramater_name] = parameter_value - except Exception: - pass - - # Adding new execution to the database - execution = create_new_execution_in_existing_run_context( - store=mlmd_store, - context_id=run_context.id, - execution_type_name=KFP_EXECUTION_TYPE_NAME_PREFIX + component_version, - pod_name=pod_name, - pipeline_name=argo_workflow_name, - run_id=argo_workflow_name, - instance_id=component_name, - custom_properties=execution_custom_properties, - ) - - argo_input_artifacts = argo_template.get('inputs', {}).get('artifacts', []) - input_artifact_ids = [] - for argo_artifact in argo_input_artifacts: - artifact_uri = argo_artifact_to_uri(argo_artifact) - if not artifact_uri: - continue - - input_name = argo_artifact.get('path', '') # Every artifact should have a path in Argo - input_artifact_path_prefix = '/tmp/inputs/' - input_artifact_path_postfix = '/data' - if input_name.startswith(input_artifact_path_prefix): - input_name = 
input_name[len(input_artifact_path_prefix):] - if input_name.endswith(input_artifact_path_postfix): - input_name = input_name[0: -len(input_artifact_path_postfix)] - - artifact = link_execution_to_input_artifact( - store=mlmd_store, - execution_id=execution.id, - uri=artifact_uri, - input_name=input_name, - ) - if artifact is None: - # TODO: Maybe there is a better way to handle missing upstream artifacts - continue - - input_artifact_ids.append(dict( - id=artifact.id, - name=input_name, - uri=artifact.uri, - )) - print('Found Input Artifact: ' + str(dict( - input_name=input_name, - id=artifact.id, - uri=artifact.uri, - ))) - - execution_id = execution.id - context_id = run_context.id - - obj.metadata.labels[METADATA_EXECUTION_ID_LABEL_KEY] = execution_id - obj.metadata.labels[METADATA_CONTEXT_ID_LABEL_KEY] = context_id - - metadata_to_add = { - 'labels': { - METADATA_EXECUTION_ID_LABEL_KEY: str(execution_id), - METADATA_CONTEXT_ID_LABEL_KEY: str(context_id), - }, - 'annotations': { - METADATA_INPUT_ARTIFACT_IDS_ANNOTATION_KEY: json.dumps(input_artifact_ids), - }, - } - - patch_pod_metadata( - namespace=obj.metadata.namespace, - pod_name=obj.metadata.name, - patch=metadata_to_add, - ) - pod_name_to_execution_id[obj.metadata.name] = execution_id - workflow_name_to_context_id[argo_workflow_name] = context_id - - print('New execution id: {}, context id: {} for pod {}.'.format(execution_id, context_id, obj.metadata.name)) - - print('Execution: ' + str(dict( - context_id=context_id, - context_name=argo_workflow_name, - execution_id=execution_id, - execution_name=obj.metadata.name, - component_name=component_name, - ))) - - # TODO: Log input parameters as execution options. - # Unfortunately, DSL compiler loses the information about inputs and their arguments. 
- - if ( - obj.metadata.name not in pods_with_written_metadata - and ( - obj.metadata.labels.get(ARGO_COMPLETED_LABEL_KEY, 'false') == 'true' - or ARGO_OUTPUTS_ANNOTATION_KEY in obj.metadata.annotations - ) - ): - artifact_ids = [] - - if ARGO_OUTPUTS_ANNOTATION_KEY in obj.metadata.annotations: # Should be present - argo_outputs = json.loads(obj.metadata.annotations[ARGO_OUTPUTS_ANNOTATION_KEY]) - argo_output_artifacts = {} - - for artifact in argo_outputs.get('artifacts', []): - art_name = artifact['name'] - output_prefix = argo_template_name + '-' - if art_name.startswith(output_prefix): - art_name = art_name[len(output_prefix):] - argo_output_artifacts[art_name] = artifact - - output_artifacts = [] - for name, art in argo_output_artifacts.items(): - artifact_uri = argo_artifact_to_uri(art) - if not artifact_uri: - continue - artifact_type_name = argo_output_name_to_type.get(name, 'NoType') # Cannot be None or '' - - print('Adding Output Artifact: ' + str(dict( - output_name=name, - uri=artifact_uri, - type=artifact_type_name, - ))) - - artifact = create_new_output_artifact( - store=mlmd_store, - execution_id=execution_id, - context_id=context_id, - uri=artifact_uri, - type_name=artifact_type_name, - output_name=name, - #run_id='Context_' + str(context_id) + '_run', - run_id=argo_workflow_name, - argo_artifact=art, - ) - - artifact_ids.append(dict( - id=artifact.id, - name=name, - uri=artifact_uri, - type=artifact_type_name, - )) - - metadata_to_add = { - 'labels': { - METADATA_WRITTEN_LABEL_KEY: 'true', - }, - 'annotations': { - METADATA_OUTPUT_ARTIFACT_IDS_ANNOTATION_KEY: json.dumps(artifact_ids), - }, - } - - patch_pod_metadata( - namespace=obj.metadata.namespace, - pod_name=obj.metadata.name, - patch=metadata_to_add, - ) - - pods_with_written_metadata[obj.metadata.name] = None - - # If the for loop ended, a server-side timeout occurred. Continue watching. - pass - - except urllib3.exceptions.ReadTimeoutError as e: - # Client side timeout, continue watching. - continue - \ No newline at end of file diff --git a/backend/metadata_writer/update_requirements.sh b/backend/metadata_writer/update_requirements.sh deleted file mode 100755 index 285f7f332da..00000000000 --- a/backend/metadata_writer/update_requirements.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash - -# This image should be in sync with Dockerfile. -IMAGE="python:3.11" -../../hack/update-requirements.sh $IMAGE requirements.txt diff --git a/backend/src/apiserver/client_manager/client_manager.go b/backend/src/apiserver/client_manager/client_manager.go index f289f23e431..a2c7913c6ec 100644 --- a/backend/src/apiserver/client_manager/client_manager.go +++ b/backend/src/apiserver/client_manager/client_manager.go @@ -30,6 +30,7 @@ import ( "github.com/kubeflow/pipelines/backend/src/apiserver/client" "github.com/kubeflow/pipelines/backend/src/apiserver/common" "github.com/kubeflow/pipelines/backend/src/apiserver/model" + "github.com/kubeflow/pipelines/backend/src/apiserver/resource" "github.com/kubeflow/pipelines/backend/src/apiserver/storage" "github.com/kubeflow/pipelines/backend/src/apiserver/validation" "github.com/kubeflow/pipelines/backend/src/common/util" @@ -83,7 +84,10 @@ func init() { } } -// Container for all service clients. +// Ensure that ClientManager implements the resource.ClientManagerInterface interface. +var _ resource.ClientManagerInterface = &ClientManager{} + +// ClientManager Container for all service clients. 
type ClientManager struct { db *storage.DB experimentStore storage.ExperimentStoreInterface @@ -91,6 +95,8 @@ type ClientManager struct { jobStore storage.JobStoreInterface runStore storage.RunStoreInterface taskStore storage.TaskStoreInterface + artifactStore storage.ArtifactStoreInterface + artifactTaskStore storage.ArtifactTaskStoreInterface resourceReferenceStore storage.ResourceReferenceStoreInterface dBStatusStore storage.DBStatusStoreInterface defaultExperimentStore storage.DefaultExperimentStoreInterface @@ -144,6 +150,14 @@ func (c *ClientManager) RunStore() storage.RunStoreInterface { return c.runStore } +func (c *ClientManager) ArtifactStore() storage.ArtifactStoreInterface { + return c.artifactStore +} + +func (c *ClientManager) ArtifactTaskStore() storage.ArtifactTaskStoreInterface { + return c.artifactTaskStore +} + func (c *ClientManager) ResourceReferenceStore() storage.ResourceReferenceStoreInterface { return c.resourceReferenceStore } @@ -296,6 +310,8 @@ func (c *ClientManager) init(options *Options) error { runStore := storage.NewRunStore(db, c.time) c.runStore = runStore + c.artifactStore = storage.NewArtifactStore(db, c.time, c.uuid) + c.artifactTaskStore = storage.NewArtifactTaskStore(db, c.uuid) // Log archive c.logArchive = initLogArchive() @@ -554,6 +570,8 @@ func autoMigrate(db *gorm.DB) error { glog.Infof("Running AutoMigrate.") if err := db.AutoMigrate( + &model.Artifact{}, + &model.ArtifactTask{}, &model.DBStatus{}, &model.DefaultExperiment{}, &model.Experiment{}, @@ -561,7 +579,7 @@ func autoMigrate(db *gorm.DB) error { &model.PipelineVersion{}, &model.Job{}, &model.Run{}, - &model.RunMetric{}, + &model.RunMetricV1{}, &model.Task{}, &model.ResourceReference{}, ); err != nil { diff --git a/backend/src/apiserver/client_manager/client_manager_test.go b/backend/src/apiserver/client_manager/client_manager_test.go index c68b600f52b..474b7230d50 100644 --- a/backend/src/apiserver/client_manager/client_manager_test.go +++ b/backend/src/apiserver/client_manager/client_manager_test.go @@ -97,12 +97,3 @@ func TestRunPreflightLengthChecks_PassWhenOK(t *testing.T) { }) require.NoError(t, err) } - -func TestFieldMeta_TaskRunId(t *testing.T) { - // FieldMeta only inspects schema; sqlite driver is sufficient. 
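The `autoMigrate` additions above are what provision the new `Artifact` and `ArtifactTask` tables: GORM derives columns, types, and indexes from the struct tags. A self-contained sketch of that mechanism follows, assuming the gorm sqlite driver and a trimmed stand-in struct; it is not the server's actual wiring.

```go
package main

import (
	"log"

	"gorm.io/driver/sqlite"
	"gorm.io/gorm"
)

// Artifact is a trimmed stand-in for model.Artifact; just enough tags
// remain to show what AutoMigrate reads from them.
type Artifact struct {
	UUID           string `gorm:"column:UUID; not null; primaryKey; type:varchar(191)"`
	Namespace      string `gorm:"column:Namespace; not null; type:varchar(63)"`
	CreatedAtInSec int64  `gorm:"column:CreatedAtInSec; not null; index:idx_artifact_created_timestamp"`
}

func main() {
	db, err := gorm.Open(sqlite.Open("file::memory:?cache=shared"), &gorm.Config{})
	if err != nil {
		log.Fatal(err)
	}
	// AutoMigrate creates the table plus the declared indexes; appending
	// &model.Artifact{} and &model.ArtifactTask{} to the server's call is
	// what creates the new tables on upgrade.
	if err := db.AutoMigrate(&Artifact{}); err != nil {
		log.Fatal(err)
	}
}
```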
- db := getTestSQLite(t) - table, dbCol, err := FieldMeta(db, &model.Task{}, "RunID") - require.NoError(t, err) - assert.Equal(t, "tasks", table) - assert.Equal(t, "RunUUID", dbCol) -} diff --git a/backend/src/apiserver/common/config.go b/backend/src/apiserver/common/config.go index 05aa2d5310c..2e162acdd2e 100644 --- a/backend/src/apiserver/common/config.go +++ b/backend/src/apiserver/common/config.go @@ -32,7 +32,6 @@ const ( KubeflowUserIDPrefix string = "KUBEFLOW_USERID_PREFIX" UpdatePipelineVersionByDefault string = "AUTO_UPDATE_PIPELINE_DEFAULT_VERSION" TokenReviewAudience string = "TOKEN_REVIEW_AUDIENCE" - MetadataTLSEnabled string = "METADATA_TLS_ENABLED" CaBundleSecretName string = "CABUNDLE_SECRET_NAME" RequireNamespaceForPipelines string = "REQUIRE_NAMESPACE_FOR_PIPELINES" CompiledPipelineSpecPatch string = "COMPILED_PIPELINE_SPEC_PATCH" @@ -136,10 +135,6 @@ func GetTokenReviewAudience() string { return GetStringConfigWithDefault(TokenReviewAudience, DefaultTokenReviewAudience) } -func GetMetadataTLSEnabled() bool { - return GetBoolConfigWithDefault(MetadataTLSEnabled, DefaultMetadataTLSEnabled) -} - func GetCaBundleSecretName() string { return GetStringConfigWithDefault(CaBundleSecretName, "") } diff --git a/backend/src/apiserver/common/const.go b/backend/src/apiserver/common/const.go index b525ea213bf..d4121847a37 100644 --- a/backend/src/apiserver/common/const.go +++ b/backend/src/apiserver/common/const.go @@ -27,21 +27,21 @@ const ( RbacResourceTypeVisualizations = "visualizations" RbacResourceTypeScheduledWorkflows = "scheduledworkflows" RbacResourceTypeWorkflows = "workflows" - - RbacResourceVerbArchive = "archive" - RbacResourceVerbUpdate = "update" - RbacResourceVerbCreate = "create" - RbacResourceVerbDelete = "delete" - RbacResourceVerbDisable = "disable" - RbacResourceVerbEnable = "enable" - RbacResourceVerbGet = "get" - RbacResourceVerbList = "list" - RbacResourceVerbRetry = "retry" - RbacResourceVerbTerminate = "terminate" - RbacResourceVerbUnarchive = "unarchive" - RbacResourceVerbReportMetrics = "reportMetrics" - RbacResourceVerbReadArtifact = "readArtifact" - RbacResourceVerbReport = "report" + RbacResourceTypeArtifacts = "artifacts" + RbacResourceVerbArchive = "archive" + RbacResourceVerbUpdate = "update" + RbacResourceVerbCreate = "create" + RbacResourceVerbDelete = "delete" + RbacResourceVerbDisable = "disable" + RbacResourceVerbEnable = "enable" + RbacResourceVerbGet = "get" + RbacResourceVerbList = "list" + RbacResourceVerbRetry = "retry" + RbacResourceVerbTerminate = "terminate" + RbacResourceVerbUnarchive = "unarchive" + RbacResourceVerbReportMetrics = "reportMetrics" + RbacResourceVerbReadArtifact = "readArtifact" + RbacResourceVerbReport = "report" ) const ( @@ -53,8 +53,6 @@ const ( const DefaultTokenReviewAudience string = "pipelines.kubeflow.org" -const DefaultMetadataTLSEnabled = false - const ( DefaultPipelineRunnerServiceAccount = "pipeline-runner" HasDefaultBucketEnvVar = "HAS_DEFAULT_BUCKET" diff --git a/backend/src/apiserver/common/utils.go b/backend/src/apiserver/common/utils.go index 9f8e1168648..09f35eb0063 100644 --- a/backend/src/apiserver/common/utils.go +++ b/backend/src/apiserver/common/utils.go @@ -106,7 +106,7 @@ func PatchPipelineDefaultParameter(text string) (string, error) { return text, nil } -// Validates a pipeline name to match MLMD requirements. +// ValidatePipelineName Validates a pipeline name. 
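The new `RbacResourceTypeArtifacts` constant feeds the same SubjectAccessReview-based authorization path as the existing resource types. A hedged sketch of the resource attributes such a check carries; the API group, verb, and user here are illustrative stand-ins for KFP's own wiring.

```go
package main

import (
	"fmt"

	authorizationv1 "k8s.io/api/authorization/v1"
)

func main() {
	// Attributes for "can this user list artifacts in namespace user-ns?".
	// KFP fills these from its common.RbacResourceVerb*/RbacResourceType*
	// constants; the group name below is illustrative.
	attrs := &authorizationv1.ResourceAttributes{
		Namespace: "user-ns",
		Verb:      "list",      // common.RbacResourceVerbList
		Group:     "pipelines.kubeflow.org",
		Resource:  "artifacts", // common.RbacResourceTypeArtifacts
	}
	sar := &authorizationv1.SubjectAccessReview{
		Spec: authorizationv1.SubjectAccessReviewSpec{
			User:               "user@example.com",
			ResourceAttributes: attrs,
		},
	}
	fmt.Printf("%+v\n", sar.Spec)
}
```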
func ValidatePipelineName(pipelineName string) error { if pipelineName == "" { return util.NewInvalidInputError("pipeline's name cannot be empty") diff --git a/backend/src/apiserver/config/config_test.go b/backend/src/apiserver/config/config_test.go index 79bcb5d6ac2..53b8eed4544 100644 --- a/backend/src/apiserver/config/config_test.go +++ b/backend/src/apiserver/config/config_test.go @@ -288,6 +288,7 @@ func TestLoadSamplesMultiplePipelineVersionsInConfig(t *testing.T) { require.NoError(t, err) _, totalSize, _, err := rm.ListPipelineVersions(pipeline.UUID, opts) + require.NoError(t, err) require.Equal(t, totalSize, 2) } diff --git a/backend/src/apiserver/config/proxy/config.go b/backend/src/apiserver/config/proxy/config.go index f94748e6119..1c3561e0aaf 100644 --- a/backend/src/apiserver/config/proxy/config.go +++ b/backend/src/apiserver/config/proxy/config.go @@ -11,6 +11,7 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. + package proxy import ( diff --git a/backend/src/apiserver/main.go b/backend/src/apiserver/main.go index ddd4eecaeb3..077b9fa1b2b 100644 --- a/backend/src/apiserver/main.go +++ b/backend/src/apiserver/main.go @@ -278,13 +278,13 @@ func startRPCServer(resourceManager *resource.ResourceManager, tlsCfg *tls.Confi ReportServerV1 := server.NewReportServerV1(resourceManager) ReportServer := server.NewReportServer(resourceManager) + ArtifactServer := server.NewArtifactServer(resourceManager) + apiv1beta1.RegisterExperimentServiceServer(s, ExperimentServerV1) apiv1beta1.RegisterPipelineServiceServer(s, PipelineServerV1) apiv1beta1.RegisterJobServiceServer(s, JobServerV1) apiv1beta1.RegisterRunServiceServer(s, RunServerV1) - apiv1beta1.RegisterTaskServiceServer(s, server.NewTaskServer(resourceManager)) apiv1beta1.RegisterReportServiceServer(s, ReportServerV1) - apiv1beta1.RegisterVisualizationServiceServer( s, server.NewVisualizationServer( @@ -293,12 +293,12 @@ func startRPCServer(resourceManager *resource.ResourceManager, tlsCfg *tls.Confi common.GetStringConfig(cm.VisualizationServicePort), )) apiv1beta1.RegisterAuthServiceServer(s, server.NewAuthServer(resourceManager)) - apiv2beta1.RegisterExperimentServiceServer(s, ExperimentServer) apiv2beta1.RegisterPipelineServiceServer(s, PipelineServer) apiv2beta1.RegisterRecurringRunServiceServer(s, JobServer) apiv2beta1.RegisterRunServiceServer(s, RunServer) apiv2beta1.RegisterReportServiceServer(s, ReportServer) + apiv2beta1.RegisterArtifactServiceServer(s, ArtifactServer) // Register reflection service on gRPC server. reflection.Register(s) @@ -341,6 +341,7 @@ func startHTTPProxy(resourceManager *resource.ResourceManager, usePipelinesKuber registerHTTPHandlerFromEndpoint(apiv2beta1.RegisterRecurringRunServiceHandlerFromEndpoint, "RecurringRunService", ctx, runtimeMux, tlsCfg) registerHTTPHandlerFromEndpoint(apiv2beta1.RegisterRunServiceHandlerFromEndpoint, "RunService", ctx, runtimeMux, tlsCfg) registerHTTPHandlerFromEndpoint(apiv2beta1.RegisterReportServiceHandlerFromEndpoint, "ReportService", ctx, runtimeMux, tlsCfg) + registerHTTPHandlerFromEndpoint(apiv2beta1.RegisterArtifactServiceHandlerFromEndpoint, "ArtifactService", ctx, runtimeMux, tlsCfg) // Create a top level mux to include both pipeline upload server and gRPC servers. 
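The main.go hunk above registers the new ArtifactService twice: once on the gRPC server and once on the grpc-gateway mux that backs the REST surface. A condensed sketch of that dual registration; the endpoint address, ports, and dial options are placeholders, not the server's real configuration.

```go
package main

import (
	"context"
	"log"
	"net/http"

	"github.com/grpc-ecosystem/grpc-gateway/v2/runtime"
	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"

	apiv2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client"
)

func main() {
	ctx := context.Background()

	// gRPC side: one generated Register*Server call per service.
	s := grpc.NewServer()
	_ = s // apiv2beta1.RegisterArtifactServiceServer(s, artifactServer)

	// HTTP side: the generated gateway handler dials the gRPC endpoint
	// and translates REST calls into RPCs.
	mux := runtime.NewServeMux()
	opts := []grpc.DialOption{grpc.WithTransportCredentials(insecure.NewCredentials())}
	if err := apiv2beta1.RegisterArtifactServiceHandlerFromEndpoint(ctx, mux, "localhost:8887", opts); err != nil {
		log.Fatal(err)
	}
	log.Fatal(http.ListenAndServe(":8888", mux))
}
```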
topMux := mux.NewRouter() @@ -487,7 +488,10 @@ func initConfig() { viper.WatchConfig() viper.OnConfigChange(func(e fsnotify.Event) { // Read in config again - viper.ReadInConfig() + err := viper.ReadInConfig() + if err != nil { + return + } }) proxy.InitializeConfigWithEnv() diff --git a/backend/src/apiserver/model/artifact.go b/backend/src/apiserver/model/artifact.go new file mode 100644 index 00000000000..84d70a9726e --- /dev/null +++ b/backend/src/apiserver/model/artifact.go @@ -0,0 +1,107 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package model contains data models for the KFP API server. +package model + +import apiv2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" + +type ArtifactType apiv2beta1.Artifact_ArtifactType + +// Artifact represents an artifact in the KFP system +type Artifact struct { + UUID string `gorm:"column:UUID; not null; primaryKey; type:varchar(191);"` + Namespace string `gorm:"column:Namespace; not null; type:varchar(63); index:idx_type_namespace,priority:1;"` + Type ArtifactType `gorm:"column:Type; default:null; index:idx_type_namespace,priority:2;"` + URI *string `gorm:"column:URI; type:text;"` + Name string `gorm:"column:Name; type:varchar(128); default:null;"` + Description string `gorm:"column:Description; type:text; default:null;"` + CreatedAtInSec int64 `gorm:"column:CreatedAtInSec; not null; default:0; index:idx_artifact_created_timestamp;"` + LastUpdateInSec int64 `gorm:"column:LastUpdateInSec; not null; default:0; index:idx_artifact_last_update_timestamp;"` + Metadata JSONData `gorm:"column:Metadata; type:json; default:null;"` + // Used primarily for metrics + NumberValue *float64 `gorm:"column:NumberValue; default:null;"` +} + +func (a Artifact) PrimaryKeyColumnName() string { + return "UUID" +} + +func (a Artifact) DefaultSortField() string { + return "CreatedAtInSec" +} + +func (a Artifact) APIToModelFieldMap() map[string]string { + return artifactAPIToModelFieldMap +} + +func (a Artifact) GetModelName() string { + return "artifacts" +} + +func (a Artifact) GetSortByFieldPrefix(s string) string { + return "artifacts." +} + +func (a Artifact) GetKeyFieldPrefix() string { + return "artifacts." 
+} + +var artifactAPIToModelFieldMap = map[string]string{ + "artifact_id": "UUID", + "id": "UUID", + "namespace": "Namespace", + "type": "Type", + "uri": "URI", + "name": "Name", + "description": "Description", + "created_at": "CreatedAtInSec", + "last_update": "LastUpdateInSec", + "metadata": "Metadata", + "number_value": "NumberValue", +} + +func (a Artifact) GetField(name string) (string, bool) { + if field, ok := artifactAPIToModelFieldMap[name]; ok { + return field, true + } + return "", false +} + +func (a Artifact) GetFieldValue(name string) interface{} { + switch name { + case "UUID": + return a.UUID + case "Namespace": + return a.Namespace + case "Type": + return a.Type + case "URI": + return a.URI + case "Name": + return a.Name + case "Description": + return a.Description + case "CreatedAtInSec": + return a.CreatedAtInSec + case "LastUpdateInSec": + return a.LastUpdateInSec + case "Metadata": + return a.Metadata + case "NumberValue": + return a.NumberValue + default: + return nil + } +} diff --git a/backend/src/apiserver/model/artifact_task.go b/backend/src/apiserver/model/artifact_task.go new file mode 100644 index 00000000000..e0b388696a8 --- /dev/null +++ b/backend/src/apiserver/model/artifact_task.go @@ -0,0 +1,98 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
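`artifactAPIToModelFieldMap` above is how list endpoints translate user-facing filter and sort keys into model fields, rejecting anything unknown. A tiny stand-alone illustration of the lookup pattern, with the map abbreviated from the file above:

```go
package main

import "fmt"

// Abbreviated copy of artifactAPIToModelFieldMap from artifact.go.
var apiToModel = map[string]string{
	"artifact_id": "UUID",
	"namespace":   "Namespace",
	"created_at":  "CreatedAtInSec",
}

// getField mirrors Artifact.GetField: resolve an API name, or report
// that the key is not filterable/sortable.
func getField(name string) (string, bool) {
	field, ok := apiToModel[name]
	return field, ok
}

func main() {
	if field, ok := getField("created_at"); ok {
		fmt.Println("sort column:", field) // sort column: CreatedAtInSec
	}
	if _, ok := getField("no_such_key"); !ok {
		fmt.Println("rejected unknown filter key")
	}
}
```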
+ +package model + +import apiv2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" + +// IOType represents the I/O relationship type +type IOType apiv2beta1.IOType + +// ArtifactTask represents the relationship between artifacts and tasks +type ArtifactTask struct { + UUID string `gorm:"column:UUID; not null; primaryKey; type:varchar(191);"` + ArtifactID string `gorm:"column:ArtifactID; not null; type:varchar(191); index:idx_link_artifact_id; uniqueIndex:UniqueLink,priority:1;"` + TaskID string `gorm:"column:TaskID; not null; type:varchar(191); index:idx_link_task_id; uniqueIndex:UniqueLink,priority:2;"` + Type IOType `gorm:"column:Type; not null; uniqueIndex:UniqueLink,priority:3;"` + RunUUID string `gorm:"column:RunUUID; not null; type:varchar(191); index:idx_link_run_id;"` + Producer JSONData `gorm:"column:Producer; type:json; default:null;"` + ArtifactKey string `gorm:"column:ArtifactKey; not null; type:varchar(191); default:'';"` + + // Relationships + Artifact Artifact `gorm:"foreignKey:ArtifactID;references:UUID;constraint:fk_artifact_tasks_artifacts,OnDelete:CASCADE,OnUpdate:CASCADE;"` + Task Task `gorm:"foreignKey:TaskID;references:UUID;constraint:fk_artifact_tasks_tasks,OnDelete:CASCADE,OnUpdate:CASCADE;"` + Run Run `gorm:"foreignKey:RunUUID;references:UUID;constraint:fk_artifact_tasks_runs,OnDelete:CASCADE,OnUpdate:CASCADE;"` +} + +func (at ArtifactTask) PrimaryKeyColumnName() string { + return "UUID" +} + +func (at ArtifactTask) DefaultSortField() string { + return "UUID" +} + +func (at ArtifactTask) APIToModelFieldMap() map[string]string { + return artifactTaskAPIToModelFieldMap +} + +func (at ArtifactTask) GetModelName() string { + return "artifact_tasks" +} + +func (at ArtifactTask) GetSortByFieldPrefix(s string) string { + return "artifact_tasks." +} + +func (at ArtifactTask) GetKeyFieldPrefix() string { + return "artifact_tasks." +} + +var artifactTaskAPIToModelFieldMap = map[string]string{ + "id": "UUID", + "artifact_id": "ArtifactID", + "task_id": "TaskID", + "type": "Type", + "run_id": "RunUUID", + "producer": "Producer", + "key": "ArtifactKey", +} + +func (at ArtifactTask) GetField(name string) (string, bool) { + if field, ok := artifactTaskAPIToModelFieldMap[name]; ok { + return field, true + } + return "", false +} + +func (at ArtifactTask) GetFieldValue(name string) interface{} { + switch name { + case "UUID": + return at.UUID + case "ArtifactID": + return at.ArtifactID + case "TaskID": + return at.TaskID + case "Type": + return at.Type + case "RunUUID": + return at.RunUUID + case "Producer": + return at.Producer + case "ArtifactKey": + return at.ArtifactKey + default: + return nil + } +} diff --git a/backend/src/apiserver/model/cron_schedule.go b/backend/src/apiserver/model/cron_schedule.go index 1ee535acaa3..5df29c54412 100644 --- a/backend/src/apiserver/model/cron_schedule.go +++ b/backend/src/apiserver/model/cron_schedule.go @@ -11,6 +11,7 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. + package model type CronSchedule struct { diff --git a/backend/src/apiserver/model/periodic_schedule.go b/backend/src/apiserver/model/periodic_schedule.go index af28ca61cff..0e78e27bcf6 100644 --- a/backend/src/apiserver/model/periodic_schedule.go +++ b/backend/src/apiserver/model/periodic_schedule.go @@ -11,6 +11,7 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
// See the License for the specific language governing permissions and // limitations under the License. + package model type PeriodicSchedule struct { diff --git a/backend/src/apiserver/model/resource_reference.go b/backend/src/apiserver/model/resource_reference.go index a5f7af2e6ca..0e4b29fcd0a 100644 --- a/backend/src/apiserver/model/resource_reference.go +++ b/backend/src/apiserver/model/resource_reference.go @@ -22,6 +22,8 @@ const ( RunResourceType ResourceType = "Run" PipelineResourceType ResourceType = "pipeline" PipelineVersionResourceType ResourceType = "PipelineVersion" + ArtifactResourceType ResourceType = "Artifact" + TaskResourceType ResourceType = "Task" ) const ( diff --git a/backend/src/apiserver/model/run.go b/backend/src/apiserver/model/run.go index 7823f4af9f1..588a9573806 100644 --- a/backend/src/apiserver/model/run.go +++ b/backend/src/apiserver/model/run.go @@ -211,7 +211,7 @@ type Run struct { Description string `gorm:"column:Description; not null;"` // Namespace is restricted to varchar(63) due to Kubernetes' naming constraints: // https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#dns-label-names - Namespace string `gorm:"column:Namespace; type:varchar(63); not null;index:namespace_createatinsec,priority:1; index:namespace_conditions_finishedatinsec,priority:1"` + Namespace string `gorm:"column:Namespace; type:varchar(63); not null;index:namespace_createatinsec,priority:1; index:namespace_conditions_finishedatinsec,priority:1"` // varchar(64) is carefully chosen to ensure composite index constraints remain // within MySQL's 767-byte limit // e.g., ExperimentId(varchar(64)) + Conditions(varchar(125)) + FinishedAtInSec(8 bytes) = 764 bytes < 767 bytes @@ -222,7 +222,10 @@ type Run struct { StorageState StorageState `gorm:"column:StorageState; not null;"` ServiceAccount string `gorm:"column:ServiceAccount; not null;"` - Metrics []*RunMetric `gorm:"foreignKey:RunUUID;references:UUID;constraint:run_metrics_RunUUID_run_details_UUID_foreign,OnDelete:CASCADE,OnUpdate:CASCADE"` // This 'has-many' relation replaces the legacy AddForeignKey constraint previously defined in client_manager.go + + // Deprecated: kept here for v1 report metrics backwards compatibility + // Remove this field from this Struct (and tag the FK in RunMetric struct, so the FK is unaffected) + Metrics []*RunMetricV1 `gorm:"foreignKey:RunUUID;references:UUID;constraint:run_metrics_RunUUID_run_details_UUID_foreign,OnDelete:CASCADE,OnUpdate:CASCADE"` // This 'has-many' relation replaces the legacy AddForeignKey constraint previously defined in client_manager.go // ResourceReferences are deprecated. Use Namespace, ExperimentId, // RecurringRunId, PipelineSpec.PipelineId, PipelineSpec.PipelineVersionId @@ -232,6 +235,9 @@ type Run struct { PipelineSpec RunDetails + + Tasks []*Task + TaskCount int `gorm:"-"` // Not persisted in DB, populated from task query } // Converts to v1beta1-compatible internal representation of run. @@ -332,7 +338,9 @@ type RunDetails struct { TaskDetails []*Task `gorm:"-"` } -type RunMetric struct { +// RunMetricV1 represents a v1 run metric. +// Deprecated: remove once v1 is removed. 
+type RunMetricV1 struct { RunUUID string `gorm:"column:RunUUID; not null; primaryKey; type:varchar(191);"` NodeID string `gorm:"column:NodeID; not null; primaryKey; type:varchar(191);"` Name string `gorm:"column:Name; not null; primaryKey; type:varchar(191);"` @@ -341,6 +349,10 @@ type RunMetric struct { Payload LargeText `gorm:"column:Payload; not null;"` } +func (RunMetricV1) TableName() string { + return "run_metrics" +} + type RuntimeStatus struct { UpdateTimeInSec int64 `json:"UpdateTimeInSec,omitempty"` State RuntimeState `json:"State,omitempty"` @@ -351,10 +363,6 @@ func (r Run) GetValueOfPrimaryKey() string { return r.UUID } -func GetRunTablePrimaryKeyColumn() string { - return "UUID" -} - // PrimaryKeyColumnName returns the primary key for model Run. func (r *Run) PrimaryKeyColumnName() string { return "UUID" diff --git a/backend/src/apiserver/model/task.go b/backend/src/apiserver/model/task.go index 93ff8c24ad0..90a59e2b631 100644 --- a/backend/src/apiserver/model/task.go +++ b/backend/src/apiserver/model/task.go @@ -15,35 +15,136 @@ package model import ( + "database/sql/driver" "encoding/json" + + apiv2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" + "google.golang.org/protobuf/encoding/protojson" + "google.golang.org/protobuf/proto" ) +// JSONSlice represents JSON list data stored in database columns +type JSONSlice []interface{} + +func (j *JSONSlice) Value() (driver.Value, error) { + if j == nil { + return nil, nil + } + return json.Marshal(j) +} + +// Scan implements sql.Scanner interface for JSONSlice +func (j *JSONSlice) Scan(value interface{}) error { + if value == nil { + *j = nil + return nil + } + switch v := value.(type) { + case []byte: + return json.Unmarshal(v, j) + case string: + return json.Unmarshal([]byte(v), j) + default: + return nil + } +} + +// JSONData represents JSON struct data stored in database columns +type JSONData map[string]interface{} + +// Scan implements sql.Scanner interface for JSONData +func (j *JSONData) Scan(value interface{}) error { + if value == nil { + *j = nil + return nil + } + switch v := value.(type) { + case []byte: + return json.Unmarshal(v, j) + case string: + return json.Unmarshal([]byte(v), j) + default: + return nil + } +} + +// Value implements driver.Valuer interface for JSONData +func (j *JSONData) Value() (driver.Value, error) { + if j == nil { + return nil, nil + } + return json.Marshal(j) +} + +// PodNames represents JSON array of pod names +type PodNames []string + +// Scan implements sql.Scanner interface for PodNames +func (p *PodNames) Scan(value interface{}) error { + if value == nil { + *p = nil + return nil + } + + switch v := value.(type) { + case []byte: + return json.Unmarshal(v, p) + case string: + return json.Unmarshal([]byte(v), p) + default: + return nil + } +} + +// Value implements driver.Valuer interface for PodNames +func (p *PodNames) Value() (driver.Value, error) { + if p == nil { + return nil, nil + } + return json.Marshal(p) +} + +// TaskArtifactHydrated holds hydrated artifact info per task (not stored in DB) +type TaskArtifactHydrated struct { + Value *Artifact + Producer *IOProducer + Key string + Type apiv2beta1.IOType +} +type IOProducer struct { + TaskName string + Iteration *int64 +} + +type TaskType apiv2beta1.PipelineTaskDetail_TaskType +type TaskStatus apiv2beta1.PipelineTaskDetail_TaskState + type Task struct { - UUID string `gorm:"column:UUID; not null; primaryKey; type:varchar(191);"` - Namespace string `gorm:"column:Namespace; not null;"` - // PipelineName was 
deprecated. Use RunID instead. - PipelineName string `gorm:"column:PipelineName; not null;"` - // RunID is limited to varchar(191) to make it indexable as a foreign key. - // For details on type lengths and index safety, refer to comments in the Pipeline struct. - // nolint:staticcheck // [ST1003] Field name matches upstream legacy naming - RunID string `gorm:"column:RunUUID; type:varchar(191); not null; index:tasks_RunUUID_run_details_UUID_foreign;"` // Note: field name (RunID) ≠ column name (RunUUID). The former should be the foreign key instead of the letter. - Run Run `gorm:"foreignKey:RunID;references:UUID;constraint:tasks_RunUUID_run_details_UUID_foreign,OnDelete:CASCADE,OnUpdate:CASCADE;"` // A Task belongs to a Run. - PodName string `gorm:"column:PodName; not null;"` - MLMDExecutionID string `gorm:"column:MLMDExecutionID; not null;"` - CreatedTimestamp int64 `gorm:"column:CreatedTimestamp; not null;"` - StartedTimestamp int64 `gorm:"column:StartedTimestamp; default:0;"` - FinishedTimestamp int64 `gorm:"column:FinishedTimestamp; default:0;"` - Fingerprint string `gorm:"column:Fingerprint; not null;"` - Name string `gorm:"column:Name; default:null"` - ParentTaskId string `gorm:"column:ParentTaskUUID; default:null"` - State RuntimeState `gorm:"column:State; default:null;"` - StateHistoryString LargeText `gorm:"column:StateHistory; default:null;"` - MLMDInputs LargeText `gorm:"column:MLMDInputs; default:null;"` - MLMDOutputs LargeText `gorm:"column:MLMDOutputs; default:null;"` - ChildrenPodsString LargeText `gorm:"column:ChildrenPods; default:null;"` - StateHistory []*RuntimeStatus `gorm:"-;"` - ChildrenPods []string `gorm:"-;"` - Payload LargeText `gorm:"column:Payload; default:null;"` + UUID string `gorm:"column:UUID; not null; primaryKey; type:varchar(191);"` + Namespace string `gorm:"column:Namespace; not null; type:varchar(63);"` + RunUUID string `gorm:"column:RunUUID; type:varchar(191); not null; index:idx_parent_run,priority:1;"` + Run Run `gorm:"foreignKey:RunUUID;references:UUID;constraint:tasks_RunUUID_run_details_UUID_foreign,OnDelete:CASCADE,OnUpdate:CASCADE;"` + Pods JSONSlice `gorm:"column:pods; not null; type:json;"` + CreatedAtInSec int64 `gorm:"column:CreatedAtInSec; not null; index:idx_task_created_timestamp;"` + StartedInSec int64 `gorm:"column:StartedInSec; default:0; index:idx_task_started_timestamp;"` + FinishedInSec int64 `gorm:"column:FinishedInSec; default:0; index:idx_task_finished_timestamp;"` + Fingerprint string `gorm:"column:Fingerprint; not null; type:varchar(255);"` + Name string `gorm:"column:Name; type:varchar(128); default:null;"` + DisplayName string `gorm:"column:DisplayName; type:varchar(128); default:null;"` + ParentTaskUUID *string `gorm:"column:ParentTaskUUID; type:varchar(191); default:null; index:idx_parent_task_uuid; index:idx_parent_run,priority:2;"` + ParentTask *Task `gorm:"foreignKey:ParentTaskUUID;references:UUID;constraint:fk_tasks_parent_task,OnDelete:CASCADE,OnUpdate:CASCADE;"` + State TaskStatus `gorm:"column:State; not null;"` + StatusMetadata JSONData `gorm:"column:StatusMetadata; type:json; default:null;"` + StateHistory JSONSlice `gorm:"column:StateHistory; type:json;"` + InputParameters JSONSlice `gorm:"column:InputParameters; type:json;"` + OutputParameters JSONSlice `gorm:"column:OutputParameters; type:json;"` + Type TaskType `gorm:"column:Type; not null; index:idx_task_type;"` + TypeAttrs JSONData `gorm:"column:TypeAttrs; not null; type:json;"` + ScopePath JSONSlice `gorm:"column:ScopePath; type:json; default:null;"` + + // 
Transient fields populated during hydration (not stored in DB) + InputArtifactsHydrated []TaskArtifactHydrated `gorm:"-"` + OutputArtifactsHydrated []TaskArtifactHydrated `gorm:"-"` } func (t Task) ToString() string { @@ -60,7 +161,7 @@ func (t Task) PrimaryKeyColumnName() string { } func (t Task) DefaultSortField() string { - return "CreatedTimestamp" + return "CreatedAtInSec" } func (t Task) APIToModelFieldMap() map[string]string { @@ -80,25 +181,24 @@ func (t Task) GetKeyFieldPrefix() string { } var taskAPIToModelFieldMap = map[string]string{ - "task_id": "UUID", // v2beta1 API - "id": "UUID", // v1beta1 API - "namespace": "Namespace", - "pipeline_name": "PipelineName", // v2beta1 API - "pipelineName": "PipelineName", // v1beta1 API - "run_id": "RunUUID", // v2beta1 API - "runId": "RunUUID", // v1beta1 API - "display_name": "Name", // v2beta1 API - "execution_id": "MLMDExecutionID", // v2beta1 API - "create_time": "CreatedTimestamp", // v2beta1 API - "start_time": "StartedTimestamp", // v2beta1 API - "end_time": "FinishedTimestamp", // v2beta1 API - "fingerprint": "Fingerprint", - "state": "State", // v2beta1 API - "state_history": "StateHistory", // v2beta1 API - "parent_task_id": "ParentTaskUUID", // v2beta1 API - "mlmdExecutionID": "MLMDExecutionID", // v1beta1 API - "created_at": "CreatedTimestamp", // v1beta1 API - "finished_at": "FinishedTimestamp", // v1beta1 API + "name": "Name", + "display_name": "DisplayName", + "task_id": "UUID", + "run_id": "RunUUID", + "pods": "Pods", + "cache_fingerprint": "Fingerprint", + "create_time": "CreatedAtInSec", + "start_time": "StartedInSec", + "end_time": "FinishedInSec", + "status": "State", + "status_metadata": "StatusMetadata", + "state_history": "StateHistory", + "type": "Type", + "type_attributes": "TypeAttrs", + "parent_task_id": "ParentTaskUUID", + "inputs": "InputParameters", + "outputs": "OutputParameters", + "scope_path": "ScopePath", } func (t Task) GetField(name string) (string, bool) { @@ -114,29 +214,123 @@ func (t Task) GetFieldValue(name string) interface{} { return t.UUID case "Namespace": return t.Namespace - case "PipelineName": - return t.PipelineName - case "RunID": - return t.RunID - case "MLMDExecutionID": - return t.MLMDExecutionID - case "CreatedTimestamp": - return t.CreatedTimestamp - case "FinishedTimestamp": - return t.FinishedTimestamp + case "RunUUID": + return t.RunUUID + case "CreatedAtInSec": + return t.CreatedAtInSec + case "StartedInSec": + return t.StartedInSec + case "FinishedInSec": + return t.FinishedInSec case "Fingerprint": return t.Fingerprint - case "ParentTaskId": - return t.ParentTaskId + case "ParentTaskUUID": + return t.ParentTaskUUID case "State": return t.State + case "StatusMetadata": + return t.StatusMetadata + case "StateHistory": + return t.StateHistory case "Name": return t.Name - case "MLMDInputs": - return t.MLMDInputs - case "MLMDOutputs": - return t.MLMDOutputs + case "DisplayName": + return t.DisplayName + case "InputParameters": + return t.InputParameters + case "OutputParameters": + return t.OutputParameters + case "Type": + return t.Type + case "TypeAttrs": + return t.TypeAttrs + case "ScopePath": + return t.ScopePath default: return nil } } + +// ProtoSliceToJSONSlice converts a slice of protobuf messages (e.g., []*MyMsg) +// into a model.JSONSlice (i.e., []interface{}). 
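A usage sketch for the generic protojson helpers defined just below: values round-trip through `JSONSlice` so GORM can store them in JSON columns, and a constructor tells the decoder which concrete message to allocate. `structpb.Struct` stands in here for the generated API types the server actually passes.

```go
package main

import (
	"fmt"

	"google.golang.org/protobuf/types/known/structpb"

	"github.com/kubeflow/pipelines/backend/src/apiserver/model"
)

func main() {
	msg, err := structpb.NewStruct(map[string]interface{}{"lr": 0.01})
	if err != nil {
		panic(err)
	}

	// Proto -> JSONSlice: each message is protojson-marshaled, then decoded
	// into a plain interface{} suitable for a JSON column.
	slice, err := model.ProtoSliceToJSONSlice([]*structpb.Struct{msg})
	if err != nil {
		panic(err)
	}

	// JSONSlice -> proto: the constructor allocates a concrete message
	// for each element before protojson.Unmarshal fills it.
	back, err := model.JSONSliceToProtoSlice(slice, func() *structpb.Struct { return new(structpb.Struct) })
	if err != nil {
		panic(err)
	}
	fmt.Println(back[0].Fields["lr"].GetNumberValue()) // 0.01
}
```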
+func ProtoSliceToJSONSlice[T proto.Message](msgs []T) (JSONSlice, error) { + out := make(JSONSlice, 0, len(msgs)) + for _, m := range msgs { + var pm proto.Message = m + if pm == nil { + out = append(out, nil) + continue + } + b, err := protojson.Marshal(m) + if err != nil { + return nil, err + } + var v interface{} + if err := json.Unmarshal(b, &v); err != nil { + return nil, err + } + out = append(out, v) + } + return out, nil +} + +// ProtoMessageToJSONData marshals a protobuf message into JSONData. +func ProtoMessageToJSONData(msg proto.Message) (JSONData, error) { + if msg == nil { + return nil, nil + } + b, err := protojson.Marshal(msg) + if err != nil { + return nil, err + } + var m map[string]interface{} + if err := json.Unmarshal(b, &m); err != nil { + return nil, err + } + return m, nil +} + +// JSONSliceToProtoSlice converts a JSONSlice (i.e., []interface{}) into a slice +// of protobuf messages. Provide a constructor for T so we can allocate a new +// concrete message for each element. +func JSONSliceToProtoSlice[T proto.Message](in JSONSlice, newT func() T) ([]T, error) { + if in == nil { + return nil, nil + } + out := make([]T, 0, len(in)) + for _, v := range in { + if v == nil { + var zero T + out = append(out, zero) + continue + } + b, err := json.Marshal(v) + if err != nil { + return nil, err + } + msg := newT() + if err := protojson.Unmarshal(b, msg); err != nil { + return nil, err + } + out = append(out, msg) + } + return out, nil +} + +// JSONDataToProtoMessage unmarshals JSONData into a protobuf message instance. +// Provide a constructor for T so we can allocate a concrete message to fill. +func JSONDataToProtoMessage[T proto.Message](data JSONData, newT func() T) (T, error) { + var zero T + if data == nil { + return zero, nil + } + b, err := json.Marshal(data) + if err != nil { + return zero, err + } + msg := newT() + if err := protojson.Unmarshal(b, msg); err != nil { + return zero, err + } + return msg, nil +} diff --git a/backend/src/apiserver/resource/client_manager_fake.go b/backend/src/apiserver/resource/client_manager_fake.go index 67c1d32451a..8594b0b7e92 100644 --- a/backend/src/apiserver/resource/client_manager_fake.go +++ b/backend/src/apiserver/resource/client_manager_fake.go @@ -23,6 +23,8 @@ import ( "github.com/kubeflow/pipelines/backend/src/common/util" ) +var _ ClientManagerInterface = &FakeClientManager{} + type FakeClientManager struct { db *storage.DB experimentStore storage.ExperimentStoreInterface @@ -30,6 +32,8 @@ type FakeClientManager struct { jobStore storage.JobStoreInterface runStore storage.RunStoreInterface taskStore storage.TaskStoreInterface + artifactStore storage.ArtifactStoreInterface + artifactTaskStore storage.ArtifactTaskStoreInterface resourceReferenceStore storage.ResourceReferenceStoreInterface dBStatusStore storage.DBStatusStoreInterface defaultExperimentStore storage.DefaultExperimentStoreInterface @@ -70,6 +74,8 @@ func NewFakeClientManager(time util.TimeInterface, uuid util.UUIDGeneratorInterf jobStore: storage.NewJobStore(db, time, nil), runStore: storage.NewRunStore(db, time), taskStore: storage.NewTaskStore(db, time, uuid), + artifactStore: storage.NewArtifactStore(db, time, uuid), + artifactTaskStore: storage.NewArtifactTaskStore(db, uuid), ExecClientFake: client.NewFakeExecClient(), resourceReferenceStore: storage.NewResourceReferenceStore(db, nil), dBStatusStore: storage.NewDBStatusStore(db), @@ -153,6 +159,14 @@ func (f *FakeClientManager) TaskStore() storage.TaskStoreInterface { return f.taskStore } +func (f 
*FakeClientManager) ArtifactStore() storage.ArtifactStoreInterface { + return f.artifactStore +} + +func (f *FakeClientManager) ArtifactTaskStore() storage.ArtifactTaskStoreInterface { + return f.artifactTaskStore +} + func (f *FakeClientManager) ResourceReferenceStore() storage.ResourceReferenceStoreInterface { return f.resourceReferenceStore } diff --git a/backend/src/apiserver/resource/resource_manager.go b/backend/src/apiserver/resource/resource_manager.go index 8cf16ecdb96..b03ffc5f35b 100644 --- a/backend/src/apiserver/resource/resource_manager.go +++ b/backend/src/apiserver/resource/resource_manager.go @@ -85,6 +85,8 @@ type ClientManagerInterface interface { JobStore() storage.JobStoreInterface RunStore() storage.RunStoreInterface TaskStore() storage.TaskStoreInterface + ArtifactStore() storage.ArtifactStoreInterface + ArtifactTaskStore() storage.ArtifactTaskStoreInterface ResourceReferenceStore() storage.ResourceReferenceStoreInterface DBStatusStore() storage.DBStatusStoreInterface DefaultExperimentStore() storage.DefaultExperimentStoreInterface @@ -113,6 +115,8 @@ type ResourceManager struct { jobStore storage.JobStoreInterface runStore storage.RunStoreInterface taskStore storage.TaskStoreInterface + artifactStore storage.ArtifactStoreInterface + artifactTaskStore storage.ArtifactTaskStoreInterface resourceReferenceStore storage.ResourceReferenceStoreInterface dBStatusStore storage.DBStatusStoreInterface defaultExperimentStore storage.DefaultExperimentStoreInterface @@ -136,6 +140,8 @@ func NewResourceManager(clientManager ClientManagerInterface, options *ResourceM jobStore: clientManager.JobStore(), runStore: clientManager.RunStore(), taskStore: clientManager.TaskStore(), + artifactStore: clientManager.ArtifactStore(), + artifactTaskStore: clientManager.ArtifactTaskStore(), resourceReferenceStore: clientManager.ResourceReferenceStore(), dBStatusStore: clientManager.DBStatusStore(), defaultExperimentStore: clientManager.DefaultExperimentStore(), @@ -413,7 +419,7 @@ func (r *ResourceManager) CreatePipelineAndPipelineVersion(p *model.Pipeline, pv return nil, nil, util.Wrap(err, "Failed to create a pipeline and a pipeline version due to template creation error") } // Validate pipeline's name in: - // 1. pipeline spec for v2 pipelines and v2-compatible pipeline must comply with MLMD requirements + // 1. pipeline spec for v2 pipelines and v2-compatible pipeline // 2. display name must be non-empty pipelineSpecName := "" if tmpl.IsV2() { @@ -640,7 +646,7 @@ func (r *ResourceManager) ReconcileSwfCrs(ctx context.Context) error { // If the pipeline isn't pinned, skip it. The runs API is used directly by the ScheduledWorkflow controller // in this case with just the pipeline ID and optionally the pipeline version ID. - if jobs[i].PipelineSpec.PipelineSpecManifest == "" && jobs[i].PipelineSpec.WorkflowSpecManifest == "" { + if jobs[i].PipelineSpecManifest == "" && jobs[i].WorkflowSpecManifest == "" { continue } @@ -695,17 +701,32 @@ func (r *ResourceManager) updateSwfCrSpec(ctx context.Context, k8sNamespace stri } // Fetches a run with a given id. +// GetRun fetches a run with full task hydration (backward compatible). func (r *ResourceManager) GetRun(runId string) (*model.Run, error) { - run, err := r.runStore.GetRun(runId) + return r.GetRunWithHydration(runId, true) +} + +// GetRunWithHydration fetches a run with optional task hydration. +// If hydrateTasks is true, full task details are loaded (expensive operation). 
+// If hydrateTasks is false, only task count is populated (lightweight operation). +func (r *ResourceManager) GetRunWithHydration(runID string, hydrateTasks bool) (*model.Run, error) { + run, err := r.runStore.GetRun(runID, hydrateTasks) if err != nil { - return nil, util.Wrapf(err, "Failed to fetch run %v", runId) + return nil, util.Wrapf(err, "Failed to fetch run %v", runID) } return run, nil } -// Fetches runs with a given set of filtering and listing options. +// ListRuns fetches runs with full task hydration (backward compatible). func (r *ResourceManager) ListRuns(filterContext *model.FilterContext, opts *list.Options) ([]*model.Run, int, string, error) { - runs, totalSize, nextPageToken, err := r.runStore.ListRuns(filterContext, opts) + return r.ListRunsWithHydration(filterContext, opts, true) +} + +// ListRunsWithHydration fetches runs with a given set of filtering and listing options. +// If hydrateTasks is true, full task details are loaded (expensive operation). +// If hydrateTasks is false, only task counts are populated (lightweight operation). +func (r *ResourceManager) ListRunsWithHydration(filterContext *model.FilterContext, opts *list.Options, hydrateTasks bool) ([]*model.Run, int, string, error) { + runs, totalSize, nextPageToken, err := r.runStore.ListRuns(filterContext, opts, hydrateTasks) if err != nil { return nil, 0, "", util.Wrap(err, "Failed to list runs") } @@ -797,14 +818,14 @@ func (r *ResourceManager) DeleteRun(ctx context.Context, runId string) error { // Creates a task entry. func (r *ResourceManager) CreateTask(t *model.Task) (*model.Task, error) { - run, err := r.GetRun(t.RunID) + run, err := r.GetRun(t.RunUUID) if err != nil { - return nil, util.Wrapf(err, "Failed to create a task for run %v", t.RunID) + return nil, util.Wrapf(err, "Failed to create a task for run %v", t.RunUUID) } if run.ExperimentId == "" { defaultExperimentId, err := r.GetDefaultExperimentId() if err != nil { - return nil, util.Wrapf(err, "Failed to create a task in run %v. Specify experiment id for the run or check if the default experiment exists", t.RunID) + return nil, util.Wrapf(err, "Failed to create a task in run %v. 
Specify experiment id for the run or check if the default experiment exists", t.RunUUID)
		}
		run.ExperimentId = defaultExperimentId
	}
@@ -813,28 +834,49 @@ func (r *ResourceManager) CreateTask(t *model.Task) (*model.Task, error) {
 	if t.Namespace == "" {
 		namespace, err := r.GetNamespaceFromExperimentId(run.ExperimentId)
 		if err != nil {
-			return nil, util.Wrapf(err, "Failed to create a task in run %v", t.RunID)
+			return nil, util.Wrapf(err, "Failed to create a task in run %v", t.RunUUID)
 		}
 		t.Namespace = namespace
 	}
 	if common.IsMultiUserMode() {
 		if t.Namespace == "" {
-			return nil, util.NewInternalServerError(util.NewInvalidInputError("Task cannot have an empty namespace in multi-user mode"), "Failed to create a task in run %v", t.RunID)
+			return nil, util.NewInternalServerError(util.NewInvalidInputError("Task cannot have an empty namespace in multi-user mode"), "Failed to create a task in run %v", t.RunUUID)
 		}
 	}
 	if err := r.CheckExperimentBelongsToNamespace(run.ExperimentId, t.Namespace); err != nil {
-		return nil, util.Wrapf(err, "Failed to create a task in run %v", t.RunID)
+		return nil, util.Wrapf(err, "Failed to create a task in run %v", t.RunUUID)
 	}
 	newTask, err := r.taskStore.CreateTask(t)
 	if err != nil {
-		return nil, util.Wrapf(err, "Failed to create a task in run %v", t.RunID)
+		return nil, util.Wrapf(err, "Failed to create a task in run %v", t.RunUUID)
 	}
 	return newTask, nil
 }
 // Fetches tasks with a given set of filtering and listing options.
-func (r *ResourceManager) ListTasks(filterContext *model.FilterContext, opts *list.Options) ([]*model.Task, int, string, error) {
+// ListTasks fetches tasks filtered by run ID, parent task ID, or namespace; the first non-empty filter wins.
+func (r *ResourceManager) ListTasks(runID, parentID, namespace string, opts *list.Options) ([]*model.Task, int, string, error) {
+	var filterContext *model.FilterContext
+
+	switch {
+	case runID != "":
+		filterContext = &model.FilterContext{
+			ReferenceKey: &model.ReferenceKey{Type: model.RunResourceType, ID: runID},
+		}
+	case parentID != "":
+		filterContext = &model.FilterContext{
+			ReferenceKey: &model.ReferenceKey{Type: model.TaskResourceType, ID: parentID},
+		}
+	case namespace != "":
+		// Filter by the explicitly provided namespace.
+		filterContext = &model.FilterContext{
+			ReferenceKey: &model.ReferenceKey{Type: model.NamespaceResourceType, ID: namespace},
+		}
+	default:
+		filterContext = &model.FilterContext{}
+	}
+
 	tasks, totalSize, nextPageToken, err := r.taskStore.ListTasks(filterContext, opts)
 	if err != nil {
 		return nil, 0, "", util.Wrap(err, "Failed to list tasks")
@@ -1139,8 +1181,9 @@ func (r *ResourceManager) CreateJob(ctx context.Context, job *model.Job) (*model
 	}
 	templateOptions := template.TemplateOptions{
-		CacheDisabled:    r.options.CacheDisabled,
-		DefaultWorkspace: r.options.DefaultWorkspace,
+		CacheDisabled:        r.options.CacheDisabled,
+		DefaultWorkspace:     r.options.DefaultWorkspace,
+		MLPipelineTLSEnabled: r.options.MLPipelineTLSEnabled,
 	}
 	tmpl, err := template.New(manifest, templateOptions)
 	if err != nil {
@@ -1271,16 +1314,6 @@ func (r *ResourceManager) DeleteJob(ctx context.Context, jobId string) error {
 	return nil
 }
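The reworked `ListTasks` above applies at most one reference filter: run ID wins over parent task ID, which wins over namespace. A minimal mirror of that precedence rule with stand-in types (not the real model package), runnable on its own:

```go
package main

import "fmt"

type refKey struct{ Type, ID string }

// pickFilter mirrors the switch in ResourceManager.ListTasks: the first
// non-empty argument wins; all-empty means "no reference filter".
func pickFilter(runID, parentID, namespace string) *refKey {
	switch {
	case runID != "":
		return &refKey{"Run", runID}
	case parentID != "":
		return &refKey{"Task", parentID}
	case namespace != "":
		return &refKey{"Namespace", namespace}
	default:
		return nil
	}
}

func main() {
	fmt.Println(pickFilter("run-1", "task-9", "ns-a")) // &{Run run-1}: run ID wins
	fmt.Println(pickFilter("", "", "ns-a"))            // &{Namespace ns-a}
	fmt.Println(pickFilter("", "", ""))                // <nil>
}
```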
-// Creates new tasks or updates existing ones.
-// This is not a part of internal API exposed to persistence agent only.
-func (r *ResourceManager) CreateOrUpdateTasks(t []*model.Task) ([]*model.Task, error) {
-	tasks, err := r.taskStore.CreateOrUpdateTasks(t)
-	if err != nil {
-		return nil, util.Wrap(err, "Failed to create or update tasks")
-	}
-	return tasks, nil
-}
-
 // Reports a workflow CR.
 // This is called to update runs.
 func (r *ResourceManager) ReportWorkflowResource(ctx context.Context, execSpec util.ExecutionSpec) (util.ExecutionSpec, error) {
@@ -1450,12 +1483,10 @@ func (r *ResourceManager) ReportWorkflowResource(ctx context.Context, execSpec u
 	if r.options.CollectMetrics {
 		execNamespace := execSpec.ExecutionNamespace()
 		execName := execSpec.ExecutionName()
-
 		if execStatus.Condition() == exec.ExecutionSucceeded {
 			workflowSuccessCounter.WithLabelValues(execNamespace, execName).Inc()
 		} else {
 			glog.Errorf("pipeline '%s' finished with an error", execName)
-			// also collects counts regarding retries
 			workflowFailedCounter.WithLabelValues(execNamespace, execName).Inc()
 		}
@@ -1529,8 +1560,9 @@ func (r *ResourceManager) fetchTemplateFromPipelineSpec(pipelineSpec *model.Pipe
 		}
 	}
 	templateOptions := template.TemplateOptions{
-		CacheDisabled:    r.options.CacheDisabled,
-		DefaultWorkspace: r.options.DefaultWorkspace,
+		CacheDisabled:        r.options.CacheDisabled,
+		DefaultWorkspace:     r.options.DefaultWorkspace,
+		MLPipelineTLSEnabled: r.options.MLPipelineTLSEnabled,
 	}
 	tmpl, err := template.New([]byte(manifest), templateOptions)
 	if err != nil {
@@ -1617,21 +1649,27 @@ func (r *ResourceManager) CreateDefaultExperiment(namespace string) (string, err
 	return defaultExperiment.UUID, nil
 }
-// TODO(gkcalat): deprecate this as we no longer have metrics in the v2beta1 run message.
-// Read metrics as ordinary artifacts instead.
-// Creates a run metric entry.
-func (r *ResourceManager) ReportMetric(metric *model.RunMetric) error {
-	err := r.runStore.CreateMetric(metric)
+// ReportMetric creates a run metric entry.
+// Deprecated: read metrics as ordinary artifacts instead.
+func (r *ResourceManager) ReportMetric(metric *model.RunMetricV1) error {
+	err := r.runStore.CreateV1Metric(metric)
 	if err != nil {
 		return util.Wrap(err, "Failed to report a run metric")
 	}
 	return nil
 }
+
+// UpdateTask updates a task entry.
+func (r *ResourceManager) UpdateTask(new *model.Task) (*model.Task, error) {
+	// Update task
+	return r.taskStore.UpdateTask(new)
+}
+
 // ReadArtifact parses run's workflow to find artifact file path and reads the content of the file
 // from object store.
 func (r *ResourceManager) ReadArtifact(runID string, nodeID string, artifactName string) ([]byte, error) {
-	run, err := r.runStore.GetRun(runID)
+	// No need to hydrate tasks for reading artifacts
+	run, err := r.runStore.GetRun(runID, false)
 	if err != nil {
 		return nil, err
 	}
@@ -1693,15 +1731,16 @@ func (r *ResourceManager) CreatePipelineVersion(pv *model.PipelineVersion) (*mod
 	// Create a template
 	templateOptions := template.TemplateOptions{
-		CacheDisabled:    r.options.CacheDisabled,
-		DefaultWorkspace: r.options.DefaultWorkspace,
+		CacheDisabled:        r.options.CacheDisabled,
+		DefaultWorkspace:     r.options.DefaultWorkspace,
+		MLPipelineTLSEnabled: r.options.MLPipelineTLSEnabled,
 	}
 	tmpl, err := template.New(pipelineSpecBytes, templateOptions)
 	if err != nil {
 		return nil, util.Wrap(err, "Failed to create a pipeline version due to template creation error")
 	}
 	// Validate pipeline's name in:
-	// 1. pipeline spec for v2 pipelines and v2-compatible pipeline must comply with MLMD requirements
+	// 1. pipeline spec for v2 pipelines and v2-compatible pipeline
	// 2.
display name must be non-empty pipelineSpecName := "" if tmpl.IsV2() { @@ -2055,7 +2094,7 @@ func (r *ResourceManager) GetValidExperimentNamespacePair(experimentId string, n return experimentId, namespace, nil } -// Fetches a task entry. +// GetTask Fetches a task entry. func (r *ResourceManager) GetTask(taskId string) (*model.Task, error) { task, err := r.taskStore.GetTask(taskId) if err != nil { @@ -2063,3 +2102,72 @@ func (r *ResourceManager) GetTask(taskId string) (*model.Task, error) { } return task, nil } + +// GetTaskChildren fetches all immediate child tasks of the given task UUID. +func (r *ResourceManager) GetTaskChildren(taskID string) ([]*model.Task, error) { + children, err := r.taskStore.GetChildTasks(taskID) + if err != nil { + return nil, util.Wrapf(err, "Failed to fetch children of task %v", taskID) + } + return children, nil +} + +// ListArtifactTasks Fetches artifact tasks with given filtering and listing options. +func (r *ResourceManager) ListArtifactTasks(filterContexts []*model.FilterContext, ioType *model.IOType, opts *list.Options) ([]*model.ArtifactTask, int, string, error) { + artifactTasks, totalSize, nextPageToken, err := r.artifactTaskStore.ListArtifactTasks(filterContexts, ioType, opts) + if err != nil { + return nil, 0, "", util.Wrap(err, "Failed to list artifact tasks") + } + return artifactTasks, totalSize, nextPageToken, nil +} + +// CreateArtifactTask Creates an artifact-task relationship entry. +func (r *ResourceManager) CreateArtifactTask(artifactTask *model.ArtifactTask) (*model.ArtifactTask, error) { + newAT, err := r.artifactTaskStore.CreateArtifactTask(artifactTask) + if err != nil { + return nil, util.Wrap(err, "Failed to create artifact-task relationship") + } + return newAT, nil +} + +// CreateArtifactTasks Creates multiple artifact-task relationship entries in bulk. +func (r *ResourceManager) CreateArtifactTasks(artifactTasks []*model.ArtifactTask) ([]*model.ArtifactTask, error) { + newATs, err := r.artifactTaskStore.CreateArtifactTasks(artifactTasks) + if err != nil { + return nil, util.Wrap(err, "Failed to create artifact-task relationships in bulk") + } + return newATs, nil +} + +// GetArtifact Fetches an artifact with a given id. +func (r *ResourceManager) GetArtifact(artifactID string) (*model.Artifact, error) { + artifact, err := r.artifactStore.GetArtifact(artifactID) + if err != nil { + return nil, util.Wrapf(err, "Failed to fetch artifact %v", artifactID) + } + return artifact, nil +} + +// CreateArtifact Creates an artifact entry. +func (r *ResourceManager) CreateArtifact(artifact *model.Artifact) (*model.Artifact, error) { + newArtifact, err := r.artifactStore.CreateArtifact(artifact) + if err != nil { + return nil, util.Wrap(err, "Failed to create artifact") + } + return newArtifact, nil +} + +// ListArtifacts Fetches artifacts with given filtering and listing options. 
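Taken together, the new ResourceManager methods above suggest a two-step write path for outputs: create the artifact row, then link it to its producing task and run. A hedged sketch of that flow; the artifact name, key, and overall sequence are illustrative, not code from this PR.

```go
package main

import (
	"github.com/kubeflow/pipelines/backend/src/apiserver/model"
	"github.com/kubeflow/pipelines/backend/src/apiserver/resource"
)

// recordOutputArtifact sketches the two-step write path: persist the
// artifact, then persist the artifact<->task edge that ties it to a run.
func recordOutputArtifact(rm *resource.ResourceManager, task *model.Task, uri string, ioType model.IOType) (*model.Artifact, error) {
	artifact, err := rm.CreateArtifact(&model.Artifact{
		Namespace: task.Namespace,
		URI:       &uri,
		Name:      "model", // illustrative artifact name
	})
	if err != nil {
		return nil, err
	}
	_, err = rm.CreateArtifactTask(&model.ArtifactTask{
		ArtifactID:  artifact.UUID,
		TaskID:      task.UUID,
		RunUUID:     task.RunUUID,
		Type:        ioType,
		ArtifactKey: "model", // illustrative output key
	})
	if err != nil {
		return nil, err
	}
	return artifact, nil
}

func main() {}
```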
+func (r *ResourceManager) ListArtifacts(filterContexts []*model.FilterContext, opts *list.Options) ([]*model.Artifact, int, string, error) { + // Use the first filter context for now (artifacts are typically filtered by namespace) + var filterContext *model.FilterContext + if len(filterContexts) > 0 { + filterContext = filterContexts[0] + } + + artifacts, totalSize, nextPageToken, err := r.artifactStore.ListArtifacts(filterContext, opts) + if err != nil { + return nil, 0, "", util.Wrap(err, "Failed to list artifacts") + } + return artifacts, totalSize, nextPageToken, nil +} diff --git a/backend/src/apiserver/resource/resource_manager_test.go b/backend/src/apiserver/resource/resource_manager_test.go index 6ebd7100ff4..1e57b774fff 100644 --- a/backend/src/apiserver/resource/resource_manager_test.go +++ b/backend/src/apiserver/resource/resource_manager_test.go @@ -2422,7 +2422,7 @@ func TestCreateJobDifferentDefaultServiceAccountName_ThroughWorkflowSpecV2(t *te }, }, } - expectedJob.PipelineSpec.PipelineName = job.PipelineSpec.PipelineName + expectedJob.PipelineName = job.PipelineName require.Equal(t, expectedJob.ToV1(), job.ToV1()) fetchedJob, err := manager.GetJob(job.UUID) require.Nil(t, err) @@ -3241,6 +3241,7 @@ func TestReconcileSwfCrs(t *testing.T) { swf.Spec.Workflow.Spec = nil swf, err = swfClient.Update(ctx, swf) require.Nil(t, swf.Spec.Workflow.Spec) + require.NoError(t, err) err = manager.ReconcileSwfCrs(ctx) require.Nil(t, err) @@ -4049,37 +4050,6 @@ func TestCreateDefaultExperiment_MultiUser(t *testing.T) { assert.Equal(t, expectedExperiment, experiment) } -func TestCreateTask(t *testing.T) { - _, manager, _, _, _, runDetail := initWithExperimentAndPipelineAndRun(t) - task := &model.Task{ - Namespace: "", - PipelineName: "pipeline/my-pipeline", - RunID: runDetail.UUID, - MLMDExecutionID: "1", - CreatedTimestamp: 1462875553, - FinishedTimestamp: 1462875663, - Fingerprint: "123", - } - - expectedTask := &model.Task{ - UUID: DefaultFakeUUID, - PipelineName: "pipeline/my-pipeline", - RunID: runDetail.UUID, - MLMDExecutionID: "1", - CreatedTimestamp: 1462875553, - FinishedTimestamp: 1462875663, - Fingerprint: "123", - } - createdTask, err := manager.CreateTask(task) - assert.Nil(t, err) - assert.Equal(t, expectedTask, createdTask, "The CreateTask return has unexpected value") - - // Verify the T in DB is in status PipelineVersionCreating. - storedTask, err := manager.taskStore.GetTask(DefaultFakeUUID) - assert.Nil(t, err) - assert.Equal(t, expectedTask, storedTask, "The StoredTask return has unexpected value") -} - var v2SpecHelloWorld = ` components: comp-hello-world: diff --git a/backend/src/apiserver/server/api_converter.go b/backend/src/apiserver/server/api_converter.go index 03767bac720..7b78f1da440 100644 --- a/backend/src/apiserver/server/api_converter.go +++ b/backend/src/apiserver/server/api_converter.go @@ -17,10 +17,10 @@ package server import ( "encoding/json" "fmt" - "sort" - "strconv" "time" + "google.golang.org/protobuf/encoding/protojson" + "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" apiv1beta1 "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client" apiv2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" @@ -1021,25 +1021,9 @@ func toApiRuntimeConfig(modelRuntime model.RuntimeConfig) *apiv2beta1.RuntimeCon return &apiRuntimeConfig } -// Converts internal runtime config representation to PipelineSpec's runtime config. -// Note: returns nil if a parsing error occurs. 
-func toPipelineSpecRuntimeConfig(cfg *model.RuntimeConfig) *pipelinespec.PipelineJob_RuntimeConfig { - if cfg == nil { - return &pipelinespec.PipelineJob_RuntimeConfig{} - } - runtimeParams := toMapProtoStructParameters(string(cfg.Parameters)) - if runtimeParams == nil { - return nil - } - return &pipelinespec.PipelineJob_RuntimeConfig{ - ParameterValues: runtimeParams, - GcsOutputDirectory: string(cfg.PipelineRoot), - } -} - // Converts API run metric to its internal representation. // Supports both v1beta1 and v2beta1 API. -func toModelRunMetric(m interface{}, runId string) (*model.RunMetric, error) { +func toModelRunMetricV1(m interface{}, runID string) (*model.RunMetricV1, error) { var name, nodeId, format string var val float64 switch apiRunMetric := m.(type) { @@ -1051,8 +1035,8 @@ func toModelRunMetric(m interface{}, runId string) (*model.RunMetric, error) { default: return nil, util.NewUnknownApiVersionError("RunMetric", m) } - modelMetric := &model.RunMetric{ - RunUUID: runId, + modelMetric := &model.RunMetricV1{ + RunUUID: runID, Name: name, NodeID: nodeId, NumberValue: val, @@ -1067,7 +1051,7 @@ func toModelRunMetric(m interface{}, runId string) (*model.RunMetric, error) { // Converts internal run metric representation to its API counterpart. // Supports v1beta1 API. -func toApiRunMetricV1(metric *model.RunMetric) *apiv1beta1.RunMetric { +func toAPIRunMetricV1(metric *model.RunMetricV1) *apiv1beta1.RunMetric { return &apiv1beta1.RunMetric{ Name: metric.Name, NodeId: metric.NodeID, @@ -1080,10 +1064,10 @@ func toApiRunMetricV1(metric *model.RunMetric) *apiv1beta1.RunMetric { // Converts an array of internal run metric representations to an array of their API counterparts. // Supports v1beta1 API. -func toApiRunMetricsV1(m []*model.RunMetric) []*apiv1beta1.RunMetric { +func toAPIRunMetricsV1(m []*model.RunMetricV1) []*apiv1beta1.RunMetric { apiMetrics := make([]*apiv1beta1.RunMetric, 0) for _, metric := range m { - apiMetrics = append(apiMetrics, toApiRunMetricV1(metric)) + apiMetrics = append(apiMetrics, toAPIRunMetricV1(metric)) } return apiMetrics } @@ -1112,46 +1096,6 @@ func toApiReportMetricsResultV1(metricName string, nodeId string, status string, return apiResultV1 } -// Converts API run or run details to internal run details representation. -// Supports both v1beta1 and v2beta1 API. -// TODO(gkcalat): update this to extend run details. 
-func toModelRunDetails(r interface{}) (*model.RunDetails, error) { - switch r := r.(type) { - case *apiv2beta1.Run: - apiRunV2 := r - modelRunDetails := &model.RunDetails{ - CreatedAtInSec: apiRunV2.GetCreatedAt().GetSeconds(), - ScheduledAtInSec: apiRunV2.GetScheduledAt().GetSeconds(), - FinishedAtInSec: apiRunV2.GetFinishedAt().GetSeconds(), - State: model.RuntimeState(apiRunV2.GetState().String()), - PipelineContextId: apiRunV2.GetRunDetails().GetPipelineContextId(), - PipelineRunContextId: apiRunV2.GetRunDetails().GetPipelineRunContextId(), - } - if apiRunV2.GetPipelineSpec() != nil { - spec, err := pipelineSpecStructToYamlString(apiRunV2.GetPipelineSpec()) - if err != nil { - return nil, util.NewInternalServerError(err, "Failed to convert a API run to internal run details representation due to pipeline spec parsing error") - } - modelRunDetails.PipelineRuntimeManifest = model.LargeText(spec) - } - return modelRunDetails, nil - case *apiv2beta1.RunDetails: - return toModelRunDetails(apiv2beta1.Run{RunDetails: r}) - case *apiv1beta1.RunDetail: - apiRunV1 := r.GetRun() - modelRunDetails, err := toModelRunDetails(apiRunV1) - if err != nil { - return nil, util.Wrap(err, "Failed to convert v1beta1 API run detail to its internal representation") - } - apiRuntimeV1 := r.GetPipelineRuntime() - modelRunDetails.PipelineRuntimeManifest = model.LargeText(apiRuntimeV1.GetPipelineManifest()) - modelRunDetails.WorkflowRuntimeManifest = model.LargeText(apiRuntimeV1.GetWorkflowManifest()) - return modelRunDetails, nil - default: - return nil, util.NewUnknownApiVersionError("RunDetails", r) - } -} - // Converts API run to its internal representation. // Supports both v1beta1 and v2beta1 API. func toModelRun(r interface{}) (*model.Run, error) { @@ -1163,10 +1107,10 @@ func toModelRun(r interface{}) (*model.Run, error) { var pipelineSpec, workflowSpec, runtimePipelineSpec, runtimeWorkflowSpec string var pipelineRoot, storageState, serviceAcc string var createTime, scheduleTime, finishTime int64 - var modelMetrics []*model.RunMetric + var modelMetrics []*model.RunMetricV1 + var modelTasks []*model.Task var state model.RuntimeState var stateHistory []*model.RuntimeStatus - var tasks []*model.Task switch r := r.(type) { case *apiv1beta1.Run: return toModelRun(&apiv1beta1.RunDetail{Run: r}) @@ -1204,9 +1148,9 @@ func toModelRun(r interface{}) (*model.Run, error) { scheduleTime = apiRunV1.GetScheduledAt().GetSeconds() finishTime = apiRunV1.GetFinishedAt().GetSeconds() if len(apiRunV1.GetMetrics()) > 0 { - modelMetrics = make([]*model.RunMetric, 0) + modelMetrics = make([]*model.RunMetricV1, 0) for _, metric := range apiRunV1.GetMetrics() { - modelMetric, err := toModelRunMetric(metric, runId) + modelMetric, err := toModelRunMetricV1(metric, runId) if err == nil { modelMetrics = append(modelMetrics, modelMetric) } @@ -1231,11 +1175,6 @@ func toModelRun(r interface{}) (*model.Run, error) { serviceAcc = apiRunV1.GetServiceAccount() case *apiv2beta1.Run: apiRunV2 := r - if temp, err := toModelTasks(apiRunV2.GetRunDetails().GetTaskDetails()); err == nil { - tasks = temp - } else { - return nil, util.NewInternalServerError(err, "Failed to convert a API run detail to its internal representation due to error converting tasks") - } if temp, err := toModelRuntimeState(apiRunV2.GetState()); err == nil { state = temp } else { @@ -1310,6 +1249,17 @@ func toModelRun(r interface{}) (*model.Run, error) { pipelineSpec = "" } specParams = "" + + if len(apiRunV2.Tasks) > 0 { + for _, apiTask := range apiRunV2.Tasks { + 
modelTask, err := toModelTask(apiTask) + if err != nil { + return nil, util.Wrap(err, "Failed to convert API run to its internal representation due to task conversion error") + } + modelTasks = append(modelTasks, modelTask) + } + } + default: return nil, util.NewUnknownApiVersionError("Run", r) } @@ -1348,8 +1298,9 @@ func toModelRun(r interface{}) (*model.Run, error) { FinishedAtInSec: finishTime, PipelineRuntimeManifest: model.LargeText(runtimePipelineSpec), WorkflowRuntimeManifest: model.LargeText(runtimeWorkflowSpec), - TaskDetails: tasks, + TaskDetails: modelTasks, }, + Tasks: modelTasks, } if err := validation.ValidateModel(&modelRun); err != nil { @@ -1390,7 +1341,7 @@ func toApiRunV1(r *model.Run) *apiv1beta1.Run { } var metrics []*apiv1beta1.RunMetric if r.Metrics != nil { - metrics = toApiRunMetricsV1(r.Metrics) + metrics = toAPIRunMetricsV1(r.Metrics) } if len(metrics) == 0 { metrics = nil @@ -1467,15 +1418,15 @@ func toApiRunV1(r *model.Run) *apiv1beta1.Run { specManifest := r.PipelineSpec.PipelineSpecManifest wfManifest := r.PipelineSpec.WorkflowSpecManifest return &apiv1beta1.Run{ - CreatedAt: timestamppb.New(time.Unix(r.RunDetails.CreatedAtInSec, 0)), + CreatedAt: timestamppb.New(time.Unix(r.CreatedAtInSec, 0)), Id: r.UUID, Metrics: metrics, Name: r.DisplayName, ServiceAccount: r.ServiceAccount, StorageState: apiv1beta1.Run_StorageState(apiv1beta1.Run_StorageState_value[string(r.StorageState.ToV1())]), Description: r.Description, - ScheduledAt: timestamppb.New(time.Unix(r.RunDetails.ScheduledAtInSec, 0)), - FinishedAt: timestamppb.New(time.Unix(r.RunDetails.FinishedAtInSec, 0)), + ScheduledAt: timestamppb.New(time.Unix(r.ScheduledAtInSec, 0)), + FinishedAt: timestamppb.New(time.Unix(r.FinishedAtInSec, 0)), Status: string(r.RunDetails.State.ToV1()), PipelineSpec: &apiv1beta1.PipelineSpec{ PipelineId: r.PipelineSpec.PipelineId, @@ -1509,14 +1460,32 @@ func toApiRun(r *model.Run) *apiv2beta1.Run { runtimeConfig = nil } } + apiTasks, err := generateAPITasks(r.Tasks) + if err != nil { + return &apiv2beta1.Run{ + RunId: r.UUID, + ExperimentId: r.ExperimentId, + Error: util.ToRpcStatus(err), + } + } + if len(apiTasks) == 0 { + apiTasks = nil + } + apiRd := &apiv2beta1.RunDetails{ PipelineContextId: r.RunDetails.PipelineContextId, PipelineRunContextId: r.RunDetails.PipelineRunContextId, - TaskDetails: toApiPipelineTaskDetails(r.RunDetails.TaskDetails), } if apiRd.PipelineContextId == 0 && apiRd.PipelineRunContextId == 0 && apiRd.TaskDetails == nil { apiRd = nil } + // Populate task count from either the TaskCount field or the length of Tasks + taskCount := int32(r.TaskCount) + if taskCount == 0 && len(apiTasks) > 0 { + // If TaskCount wasn't populated but we have tasks, use the task slice length + taskCount = int32(len(apiTasks)) + } + apiRunV2 := &apiv2beta1.Run{ RunId: r.UUID, ExperimentId: r.ExperimentId, @@ -1528,12 +1497,15 @@ func toApiRun(r *model.Run) *apiv2beta1.Run { StorageState: toApiRunStorageState(&r.StorageState), State: toApiRuntimeState(&r.RunDetails.State), StateHistory: toApiRuntimeStatuses(r.RunDetails.StateHistory), - CreatedAt: timestamppb.New(time.Unix(r.RunDetails.CreatedAtInSec, 0)), - ScheduledAt: timestamppb.New(time.Unix(r.RunDetails.ScheduledAtInSec, 0)), - FinishedAt: timestamppb.New(time.Unix(r.RunDetails.FinishedAtInSec, 0)), + CreatedAt: timestamppb.New(time.Unix(r.CreatedAtInSec, 0)), + ScheduledAt: timestamppb.New(time.Unix(r.ScheduledAtInSec, 0)), + FinishedAt: timestamppb.New(time.Unix(r.FinishedAtInSec, 0)), RunDetails: apiRd, + TaskCount: 
taskCount, + Tasks: apiTasks, } - err := util.NewInvalidInputError("Failed to parse the pipeline source") + + err = util.NewInvalidInputError("Failed to parse the pipeline source") if r.PipelineSpec.PipelineVersionId != "" { apiRunV2.PipelineSource = &apiv2beta1.Run_PipelineVersionReference{ PipelineVersionReference: &apiv2beta1.PipelineVersionReference{ @@ -1568,6 +1540,34 @@ func toApiRun(r *model.Run) *apiv2beta1.Run { } } +func generateAPITasks(tasks []*model.Task) ([]*apiv2beta1.PipelineTaskDetail, error) { + // Create map to store parent->children relationships + taskMap := make(map[string]*model.Task) + childrenMap := make(map[string][]*model.Task) + + // Build maps of tasks and parent->children relationships + for _, task := range tasks { + taskMap[task.UUID] = task + if task.ParentTaskUUID != nil { + childrenMap[*task.ParentTaskUUID] = append(childrenMap[*task.ParentTaskUUID], task) + } + } + + // Convert each task to API format, building child task info if it has children + apiTasks := make([]*apiv2beta1.PipelineTaskDetail, 0) + for _, task := range tasks { + childTasks := childrenMap[task.UUID] + + apiTask, err := toAPITask(task, childTasks) + if err != nil { + return nil, util.Wrap(err, "Failed to convert task to API format") + } + apiTasks = append(apiTasks, apiTask) + } + + return apiTasks, nil +} + // Converts an array of internal pipeline version representations to an array of API pipeline versions. // Supports v1beta1 API. func toApiRunsV1(runs []*model.Run) []*apiv1beta1.Run { @@ -1613,240 +1613,6 @@ func toApiRunDetailV1(r *model.Run) *apiv1beta1.RunDetail { return apiRunDetails } -// Converts API task to its internal representation. -// Supports both v1beta1 and v2beta1 API. -func toModelTask(t interface{}) (*model.Task, error) { - if t == nil { - return &model.Task{}, nil - } - var taskId, nodeId, namespace, pipelineName, runId, mlmdExecId, fingerprint string - var name, parentTaskId, state, inputs, outputs string - var createTime, startTime, finishTime int64 - var stateHistory []*model.RuntimeStatus - var children []string - switch t := t.(type) { - case *apiv1beta1.Task: - apiTaskV1 := t - namespace = apiTaskV1.GetNamespace() - taskId = apiTaskV1.GetId() - pipelineName = apiTaskV1.GetPipelineName() - runId = apiTaskV1.GetRunId() - mlmdExecId = apiTaskV1.GetMlmdExecutionID() - fingerprint = apiTaskV1.GetFingerprint() - createTime = apiTaskV1.GetCreatedAt().GetSeconds() - startTime = createTime - finishTime = apiTaskV1.GetFinishedAt().GetSeconds() - name = "" - parentTaskId = "" - state = "" - inputs = "" - outputs = "" - case *apiv2beta1.PipelineTaskDetail: - apiTaskDetailV2 := t - namespace = "" - taskId = apiTaskDetailV2.GetTaskId() - pipelineName = "" - runId = apiTaskDetailV2.GetRunId() - mlmdExecId = fmt.Sprint(apiTaskDetailV2.GetExecutionId()) - fingerprint = "" - createTime = apiTaskDetailV2.GetCreateTime().GetSeconds() - startTime = apiTaskDetailV2.GetStartTime().GetSeconds() - finishTime = apiTaskDetailV2.GetEndTime().GetSeconds() - name = apiTaskDetailV2.GetDisplayName() - parentTaskId = apiTaskDetailV2.GetParentTaskId() - state = apiTaskDetailV2.GetState().String() - if hist, err := toModelRuntimeStatuses(apiTaskDetailV2.GetStateHistory()); err == nil { - stateHistory = hist - } - if inpBytes, err := json.Marshal(apiTaskDetailV2.GetInputs()); err == nil { - inputs = string(inpBytes) - } - if outBytes, err := json.Marshal(apiTaskDetailV2.GetOutputs()); err == nil { - outputs = string(outBytes) - } - for _, c := range apiTaskDetailV2.GetChildTasks() { - if 
c.GetTaskId() != "" { - children = append(children, c.GetTaskId()) - } else { - children = append(children, c.GetPodName()) - } - } - case util.NodeStatus: - // TODO(gkcalat): parse input and output artifacts - wfStatus := t - nodeId = wfStatus.ID - name = wfStatus.DisplayName - state = wfStatus.State - startTime = wfStatus.StartTime - createTime = wfStatus.CreateTime - finishTime = wfStatus.FinishTime - children = wfStatus.Children - default: - return nil, util.NewUnknownApiVersionError("Task", t) - } - return &model.Task{ - UUID: taskId, - PodName: nodeId, - Namespace: namespace, - PipelineName: pipelineName, - RunID: runId, - MLMDExecutionID: mlmdExecId, - CreatedTimestamp: createTime, - StartedTimestamp: startTime, - FinishedTimestamp: finishTime, - Fingerprint: fingerprint, - Name: name, - ParentTaskId: parentTaskId, - State: model.RuntimeState(state).ToV2(), - StateHistory: stateHistory, - MLMDInputs: model.LargeText(inputs), - MLMDOutputs: model.LargeText(outputs), - ChildrenPods: children, - }, nil -} - -// Converts API tasks details into their internal representations. -// Supports both v1beta1 and v2beta1 API. -func toModelTasks(t interface{}) ([]*model.Task, error) { - if t == nil { - return nil, nil - } - switch t := t.(type) { - case []*apiv2beta1.PipelineTaskDetail: - apiTasks := t - modelTasks := make([]*model.Task, 0) - for _, apiTask := range apiTasks { - modelTask, err := toModelTask(apiTask) - if err != nil { - return nil, util.Wrap(err, "Failed to convert API tasks to their internal representations") - } - modelTasks = append(modelTasks, modelTask) - } - return modelTasks, nil - case util.ExecutionSpec: - execSpec := t - runId := execSpec.ExecutionObjectMeta().Labels[util.LabelKeyWorkflowRunId] - namespace := execSpec.ExecutionNamespace() - createdAt := execSpec.ExecutionObjectMeta().GetCreationTimestamp().Unix() - // Get sorted node names to make the results repeatable - nodes := execSpec.ExecutionStatus().NodeStatuses() - nodeNames := make([]string, 0, len(nodes)) - for nodeName := range nodes { - nodeNames = append(nodeNames, nodeName) - } - sort.Strings(nodeNames) - modelTasks := make([]*model.Task, 0) - for _, nodeName := range nodeNames { - node := nodes[nodeName] - modelTask, err := toModelTask(node) - if err != nil { - return nil, util.Wrap(err, "Failed to convert Argo workflow to tasks details") - } - modelTask.RunID = runId - modelTask.Namespace = namespace - modelTask.CreatedTimestamp = createdAt - modelTasks = append(modelTasks, modelTask) - } - return modelTasks, nil - default: - return nil, util.NewUnknownApiVersionError("[]Task", t) - } -} - -// Converts internal task representation to its API counterpart. -// Supports v1beta1 API. -func toApiTaskV1(task *model.Task) *apiv1beta1.Task { - return &apiv1beta1.Task{ - Id: task.UUID, - Namespace: task.Namespace, - PipelineName: task.PipelineName, - RunId: task.RunID, - MlmdExecutionID: task.MLMDExecutionID, - CreatedAt: timestamppb.New(time.Unix(task.CreatedTimestamp, 0)), - FinishedAt: timestamppb.New(time.Unix(task.FinishedTimestamp, 0)), - Fingerprint: task.Fingerprint, - } -} - -// Converts internal task representation to its API counterpart. -// Supports v2beta1 API. -// TODO(gkcalat): implement runtime details of a task. 
-func toApiPipelineTaskDetail(t *model.Task) *apiv2beta1.PipelineTaskDetail { - execId, err := strconv.ParseInt(t.MLMDExecutionID, 10, 64) - if err != nil { - execId = 0 - } - var inputArtifacts map[string]*apiv2beta1.ArtifactList - if t.MLMDInputs != "" { - err = json.Unmarshal([]byte(t.MLMDInputs), &inputArtifacts) - if err != nil { - return &apiv2beta1.PipelineTaskDetail{ - RunId: t.RunID, - TaskId: t.UUID, - Error: util.ToRpcStatus(util.NewInternalServerError(err, "Failed to convert task's internal representation to its API counterpart due to error parsing inputs")), - } - } - } - var outputArtifacts map[string]*apiv2beta1.ArtifactList - if t.MLMDOutputs != "" { - err = json.Unmarshal([]byte(t.MLMDOutputs), &outputArtifacts) - if err != nil { - return &apiv2beta1.PipelineTaskDetail{ - RunId: t.RunID, - TaskId: t.UUID, - Error: util.ToRpcStatus(util.NewInternalServerError(err, "Failed to convert task's internal representation to its API counterpart due to error parsing outputs")), - } - } - } - var children []*apiv2beta1.PipelineTaskDetail_ChildTask - for _, c := range t.ChildrenPods { - children = append(children, &apiv2beta1.PipelineTaskDetail_ChildTask{ - ChildTask: &apiv2beta1.PipelineTaskDetail_ChildTask_PodName{PodName: c}, - }) - } - return &apiv2beta1.PipelineTaskDetail{ - RunId: t.RunID, - TaskId: t.UUID, - DisplayName: t.Name, - CreateTime: timestamppb.New(time.Unix(t.CreatedTimestamp, 0)), - StartTime: timestamppb.New(time.Unix(t.StartedTimestamp, 0)), - EndTime: timestamppb.New(time.Unix(t.FinishedTimestamp, 0)), - State: apiv2beta1.RuntimeState(apiv2beta1.RuntimeState_value[t.State.ToString()]), - ExecutionId: execId, - Inputs: inputArtifacts, - Outputs: outputArtifacts, - ParentTaskId: t.ParentTaskId, - StateHistory: toApiRuntimeStatuses(t.StateHistory), - ChildTasks: children, - } -} - -// Converts and array of internal task representations to its API counterpart. -// Supports v1beta1 API. -func toApiTasksV1(tasks []*model.Task) []*apiv1beta1.Task { - if len(tasks) == 0 { - return nil - } - apiTasks := make([]*apiv1beta1.Task, 0) - for _, task := range tasks { - apiTasks = append(apiTasks, toApiTaskV1(task)) - } - return apiTasks -} - -// Converts and array of internal task representations to its API counterpart. -// Supports v2beta1 API. -func toApiPipelineTaskDetails(tasks []*model.Task) []*apiv2beta1.PipelineTaskDetail { - if len(tasks) == 0 { - return nil - } - apiTasks := make([]*apiv2beta1.PipelineTaskDetail, 0) - for _, task := range tasks { - apiTasks = append(apiTasks, toApiPipelineTaskDetail(task)) - } - return apiTasks -} - // Converts API recurring run to its internal representation. // Supports both v1beta1 and v2beta1 API. func toModelJob(j interface{}) (*model.Job, error) { @@ -2245,7 +2011,7 @@ func toApiRecurringRun(j *model.Job) *apiv2beta1.RecurringRun { ExperimentId: j.ExperimentId, } - if j.PipelineSpec.PipelineId == "" && j.PipelineSpec.PipelineVersionId == "" { + if j.PipelineId == "" && j.PipelineVersionId == "" { spec, err := YamlStringToPipelineSpecStruct(string(j.PipelineSpecManifest)) if err != nil { return &apiv2beta1.RecurringRun{ @@ -2348,59 +2114,6 @@ func toApiRunStorageState(s *model.StorageState) apiv2beta1.Run_StorageState { } } -// Converts internal storage state representation to its API run's counterpart. -// Support v1beta1 API. -// Note, default to STORAGESTATE_AVAILABLE. 
-func toApiRunStorageStateV1(s *model.StorageState) apiv1beta1.Run_StorageState { - if string(*s) == "" { - return apiv1beta1.Run_STORAGESTATE_AVAILABLE - } - switch string(*s) { - case string(model.StorageStateArchived), string(model.StorageStateArchived.ToV1()): - return apiv1beta1.Run_STORAGESTATE_ARCHIVED - case string(model.StorageStateAvailable), string(model.StorageStateAvailable.ToV1()): - return apiv1beta1.Run_STORAGESTATE_AVAILABLE - default: - return apiv1beta1.Run_STORAGESTATE_AVAILABLE - } -} - -// Converts internal storage state representation to its API experiment's counterpart. -// Support v2beta1 API. -func toApiExperimentStorageState(s *model.StorageState) apiv2beta1.Experiment_StorageState { - if string(*s) == "" { - return apiv2beta1.Experiment_STORAGE_STATE_UNSPECIFIED - } - switch string(*s) { - case string(model.StorageStateArchived), string(model.StorageStateArchived.ToV1()): - return apiv2beta1.Experiment_ARCHIVED - case string(model.StorageStateAvailable), string(model.StorageStateAvailable.ToV1()): - return apiv2beta1.Experiment_AVAILABLE - case string(model.StorageStateUnspecified), string(model.StorageStateUnspecified.ToV1()): - return apiv2beta1.Experiment_STORAGE_STATE_UNSPECIFIED - default: - return apiv2beta1.Experiment_STORAGE_STATE_UNSPECIFIED - } -} - -// Converts internal storage state representation to its API experiment's counterpart. -// Support v1beta1 API. -func toApiExperimentStorageStateV1(s *model.StorageState) apiv1beta1.Experiment_StorageState { - if string(*s) == "" { - return apiv1beta1.Experiment_STORAGESTATE_UNSPECIFIED - } - switch string(*s) { - case string(model.StorageStateArchived), string(model.StorageStateArchived.ToV1()): - return apiv1beta1.Experiment_STORAGESTATE_ARCHIVED - case string(model.StorageStateAvailable), string(model.StorageStateAvailable.ToV1()): - return apiv1beta1.Experiment_STORAGESTATE_AVAILABLE - case string(model.StorageStateUnspecified), string(model.StorageStateUnspecified.ToV1()): - return apiv1beta1.Experiment_STORAGESTATE_UNSPECIFIED - default: - return apiv1beta1.Experiment_STORAGESTATE_UNSPECIFIED - } -} - // Converts API runtime state to its internal representation. // Supports both v1beta1 and v2beta1 API. func toModelRuntimeState(s interface{}) (model.RuntimeState, error) { @@ -2496,3 +2209,523 @@ func toApiRuntimeStatuses(s []*model.RuntimeStatus) []*apiv2beta1.RuntimeStatus } return statuses } + +// Converts API v2beta1 artifact to its internal representation. +func toModelArtifact(a *apiv2beta1.Artifact) (*model.Artifact, error) { + if a == nil { + return nil, util.NewInvalidInputError("Artifact cannot be nil") + } + + modelArtifact := &model.Artifact{ + UUID: a.GetArtifactId(), + Namespace: a.GetNamespace(), + Type: model.ArtifactType(a.GetType()), + URI: a.Uri, + Name: a.GetName(), + Description: a.GetDescription(), + // NumberValue can be nil & nullable, so directly apply it + // instead of using a.GetNumberValue() (which will return 0 if nil). 
+ NumberValue: a.NumberValue, + CreatedAtInSec: time.Now().Unix(), + LastUpdateInSec: time.Now().Unix(), + } + + if a.GetMetadata() != nil { + structValue := &structpb.Struct{Fields: a.GetMetadata()} + jsonDataBytes, err := protojson.Marshal(structValue) + if err != nil { + return nil, util.NewInternalServerError(err, "Failed to marshal metadata to JSON") + } + var jsonData model.JSONData + if err := json.Unmarshal(jsonDataBytes, &jsonData); err != nil { + return nil, util.NewInternalServerError(err, "Failed to unmarshal JSON into JSONData map") + } + modelArtifact.Metadata = jsonData + } + + if err := validation.ValidateModel(modelArtifact); err != nil { + return nil, util.NewInternalServerError(err, "Failed to convert API artifact to internal representation") + } + return modelArtifact, nil +} + +// Converts internal artifact representation to its API counterpart. +// Supports v2beta1 API. +func toAPIArtifact(artifact *model.Artifact) (*apiv2beta1.Artifact, error) { + if artifact == nil { + return nil, util.NewInvalidInputError("Artifact cannot be nil") + } + + apiArtifact := &apiv2beta1.Artifact{ + ArtifactId: artifact.UUID, + Namespace: artifact.Namespace, + Type: apiv2beta1.Artifact_ArtifactType(artifact.Type), + Uri: artifact.URI, + Name: artifact.Name, + Description: artifact.Description, + NumberValue: artifact.NumberValue, + CreatedAt: timestamppb.New(time.Unix(artifact.CreatedAtInSec, 0)), + } + + if artifact.Metadata != nil { + jsonDataBytes, err := json.Marshal(artifact.Metadata) + if err != nil { + return nil, util.NewInternalServerError(err, "Failed to marshal metadata to JSON") + } + var structValue structpb.Struct + if err := protojson.Unmarshal(jsonDataBytes, &structValue); err != nil { + return nil, util.NewInternalServerError(err, "Failed to unmarshal JSON into structpb.Struct") + } + apiArtifact.Metadata = structValue.GetFields() + } + + return apiArtifact, nil +} + +// Converts an array of internal artifact representations to an array of their API counterparts. +// Supports v2beta1 API. +func toAPIArtifacts(artifacts []*model.Artifact) []*apiv2beta1.Artifact { + apiArtifacts := make([]*apiv2beta1.Artifact, 0) + for _, artifact := range artifacts { + apiArtifact, err := toAPIArtifact(artifact) + if err != nil { + return nil + } + apiArtifacts = append(apiArtifacts, apiArtifact) + } + return apiArtifacts +} + +// Converts internal artifact task representation to its API counterpart. +// Supports v2beta1 API. +func toAPIArtifactTask(artifactTask *model.ArtifactTask) *apiv2beta1.ArtifactTask { + if artifactTask == nil { + return &apiv2beta1.ArtifactTask{} + } + + apiArtifactTask := &apiv2beta1.ArtifactTask{ + Id: artifactTask.UUID, + ArtifactId: artifactTask.ArtifactID, + TaskId: artifactTask.TaskID, + Type: apiv2beta1.IOType(artifactTask.Type), + RunId: artifactTask.RunUUID, + Key: artifactTask.ArtifactKey, + } + + // Convert Producer from JSONData to IOProducer + if artifactTask.Producer != nil { + producer, err := model.JSONDataToProtoMessage( + artifactTask.Producer, + func() *apiv2beta1.IOProducer { + return &apiv2beta1.IOProducer{} + }) + if err == nil { + apiArtifactTask.Producer = producer + } + } + + return apiArtifactTask +} + +// Converts an array of internal artifact task representations to an array of their API counterparts. +// Supports v2beta1 API. 
+func toAPIArtifactTasks(artifactTasks []*model.ArtifactTask) []*apiv2beta1.ArtifactTask {
+	apiArtifactTasks := make([]*apiv2beta1.ArtifactTask, 0)
+	for _, artifactTask := range artifactTasks {
+		apiArtifactTasks = append(apiArtifactTasks, toAPIArtifactTask(artifactTask))
+	}
+	return apiArtifactTasks
+}
+
+// Converts API v2beta1 ArtifactTask to its internal representation.
+func toModelArtifactTask(apiAT *apiv2beta1.ArtifactTask) (*model.ArtifactTask, error) {
+	if apiAT == nil {
+		return nil, util.NewInvalidInputError("ArtifactTask cannot be nil")
+	}
+
+	if apiAT.GetType() == apiv2beta1.IOType_UNSPECIFIED {
+		return nil, util.NewInvalidInputError("ArtifactTask's type cannot be unspecified")
+	}
+
+	modelAT := &model.ArtifactTask{
+		UUID:        apiAT.GetId(),
+		RunUUID:     apiAT.GetRunId(),
+		ArtifactID:  apiAT.GetArtifactId(),
+		TaskID:      apiAT.GetTaskId(),
+		Type:        model.IOType(apiAT.GetType()),
+		ArtifactKey: apiAT.GetKey(),
+	}
+
+	// Convert Producer from IOProducer to JSONData
+	if apiAT.GetProducer() != nil {
+		producer, err := model.ProtoMessageToJSONData(apiAT.GetProducer())
+		if err != nil {
+			return nil, util.Wrap(err, "Failed to convert producer to JSONData")
+		}
+		modelAT.Producer = producer
+	}
+
+	return modelAT, nil
+}
+
+// Converts API PipelineTaskDetail to its internal representation.
+// Supports v2beta1 API.
+// Note that InputArtifactsHydrated and OutputArtifactsHydrated are not converted: they are
+// not stored in the DB, and filling them out would require additional DB queries to fetch
+// artifact values.
+func toModelTask(apiTask *apiv2beta1.PipelineTaskDetail) (*model.Task, error) {
+	if apiTask == nil {
+		return nil, util.NewInvalidInputError("Task cannot be nil")
+	}
+
+	task := &model.Task{
+		UUID:           apiTask.GetTaskId(),
+		RunUUID:        apiTask.GetRunId(),
+		ParentTaskUUID: apiTask.ParentTaskId,
+		Name:           apiTask.GetName(),
+		DisplayName:    apiTask.GetDisplayName(),
+		Fingerprint:    apiTask.GetCacheFingerprint(),
+	}
+
+	// Convert timestamps
+	if apiTask.GetCreateTime() != nil {
+		task.CreatedAtInSec = apiTask.GetCreateTime().GetSeconds()
+	}
+	if apiTask.GetStartTime() != nil {
+		task.StartedInSec = apiTask.GetStartTime().GetSeconds()
+	}
+	if apiTask.GetEndTime() != nil {
+		task.FinishedInSec = apiTask.GetEndTime().GetSeconds()
+	}
+
+	// Convert status
+	task.State = model.TaskStatus(apiTask.GetState())
+
+	// Convert task type
+	task.Type = model.TaskType(apiTask.GetType())
+	if apiTask.GetPods() != nil {
+		pods, err := model.ProtoSliceToJSONSlice(apiTask.GetPods())
+		if err != nil {
+			return nil, err
+		}
+		task.Pods = pods
+	}
+
+	// Convert status metadata from the StatusMetadata struct
+	if apiTask.GetStatusMetadata() != nil {
+		sm, err := model.ProtoMessageToJSONData(apiTask.GetStatusMetadata())
+		if err != nil {
+			return nil, err
+		}
+		task.StatusMetadata = sm
+	}
+
+	// Convert state history
+	if len(apiTask.GetStateHistory()) > 0 {
+		sh, err := model.ProtoSliceToJSONSlice(apiTask.GetStateHistory())
+		if err != nil {
+			return nil, err
+		}
+		task.StateHistory = sh
+	}
+
+	// Convert inputs: only the parameters are persisted on the task; input artifacts are
+	// not converted here (see the note on hydrated artifacts above).
+	if apiTask.GetInputs() != nil {
+		if apiTask.GetInputs().GetParameters() != nil {
+			parameters, err := model.ProtoSliceToJSONSlice(apiTask.GetInputs().GetParameters())
+			if err != nil {
+				return nil, err
+			}
+			task.InputParameters = parameters
+		}
+	}
+
+	// Convert outputs: likewise, only the parameters are persisted on the task.
+	if apiTask.GetOutputs() != nil {
+		if apiTask.GetOutputs().GetParameters() != nil {
+			parameters, err := model.ProtoSliceToJSONSlice(apiTask.GetOutputs().GetParameters())
+			if err != nil {
+				return nil, err
+			}
+			task.OutputParameters = parameters
+		}
+	}
+
+	if apiTask.GetTypeAttributes() != nil {
+		attrs, err := model.ProtoMessageToJSONData(apiTask.GetTypeAttributes())
+		if err != nil {
+			return nil, err
+		}
+		task.TypeAttrs = attrs
+	}
+
+	// Convert scope_path - validate it's not empty if provided
+	if apiTask.GetScopePath() != nil {
+		if len(apiTask.GetScopePath()) == 0 {
+			return nil, util.NewInvalidInputError("scope_path cannot be empty when provided")
+		}
+		// Convert []string to JSONSlice
+		scopePathSlice := make(model.JSONSlice, len(apiTask.GetScopePath()))
+		for i, path := range apiTask.GetScopePath() {
+			scopePathSlice[i] = path
+		}
+		task.ScopePath = scopePathSlice
+	}
+
+	return task, nil
+}
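+
+// For illustration, a round trip through these converters (a sketch, not code from
+// this change; the field values are hypothetical):
+//
+//	apiTask := &apiv2beta1.PipelineTaskDetail{
+//		TaskId:      "task-1",
+//		RunId:       "run-1",
+//		DisplayName: "train",
+//		CreateTime:  timestamppb.New(time.Unix(100, 0)),
+//	}
+//	modelTask, _ := toModelTask(apiTask) // API -> model: CreatedAtInSec == 100
+//	back, _ := toAPITask(modelTask, nil) // model -> API, no child tasks
+//
+// preserves identifiers, timestamps, and parameters, but not Inputs/Outputs
+// artifacts, which are dropped on the way in as described above.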
+
+// Converts internal task representation to its API counterpart.
+// Supports v2beta1 API.
+// Note that child tasks are not stored in the tasks table so
+// they must be provided as an argument.
+func toAPITask(modelTask *model.Task, childTasks []*model.Task) (*apiv2beta1.PipelineTaskDetail, error) {
+	if modelTask == nil {
+		return nil, util.NewInvalidInputError("Task cannot be nil")
+	}
+
+	apiTask := &apiv2beta1.PipelineTaskDetail{
+		TaskId:           modelTask.UUID,
+		RunId:            modelTask.RunUUID,
+		ParentTaskId:     modelTask.ParentTaskUUID,
+		Name:             modelTask.Name,
+		DisplayName:      modelTask.DisplayName,
+		CacheFingerprint: modelTask.Fingerprint,
+		Inputs:           &apiv2beta1.PipelineTaskDetail_InputOutputs{},
+		Outputs:          &apiv2beta1.PipelineTaskDetail_InputOutputs{},
+	}
+
+	// Convert timestamps
+	if modelTask.CreatedAtInSec > 0 {
+		apiTask.CreateTime = &timestamppb.Timestamp{Seconds: modelTask.CreatedAtInSec}
+	}
+	if modelTask.StartedInSec > 0 {
+		apiTask.StartTime = &timestamppb.Timestamp{Seconds: modelTask.StartedInSec}
+	}
+	if modelTask.FinishedInSec > 0 {
+		apiTask.EndTime = &timestamppb.Timestamp{Seconds: modelTask.FinishedInSec}
+	}
+
+	// Convert status
+	apiTask.State = apiv2beta1.PipelineTaskDetail_TaskState(modelTask.State)
+
+	// Convert task type
+	apiTask.Type = apiv2beta1.PipelineTaskDetail_TaskType(modelTask.Type)
+
+	// Convert the task's pods
+	if modelTask.Pods != nil {
+		apiPods, err := model.JSONSliceToProtoSlice(
+			modelTask.Pods,
+			func() *apiv2beta1.PipelineTaskDetail_TaskPod {
+				return &apiv2beta1.PipelineTaskDetail_TaskPod{}
+			})
+		if err != nil {
+			return nil, err
+		}
+		apiTask.Pods = apiPods
+	}
+
+	// Convert status metadata to the StatusMetadata struct
+	if modelTask.StatusMetadata != nil {
+		statusMeta, err := model.JSONDataToProtoMessage(
+			modelTask.StatusMetadata,
+			func() *apiv2beta1.PipelineTaskDetail_StatusMetadata {
+				return &apiv2beta1.PipelineTaskDetail_StatusMetadata{}
+			})
+		if err != nil {
+			return nil, err
+		}
+		apiTask.StatusMetadata = statusMeta
+	}
+
+	// Convert state history from JSONData back to a TaskStatus slice
+	if modelTask.StateHistory != nil {
+		apiSH, err := model.JSONSliceToProtoSlice(
+			modelTask.StateHistory,
+			func() *apiv2beta1.PipelineTaskDetail_TaskStatus {
+				return &apiv2beta1.PipelineTaskDetail_TaskStatus{}
+			})
+		if err != nil {
+			return nil, err
+		}
+		apiTask.StateHistory = apiSH
+	}
+
+	// Convert InputParameters to API inputs field
+	if modelTask.InputParameters != nil {
+		apiInputParams, err := model.JSONSliceToProtoSlice(
+			modelTask.InputParameters,
+			func() *apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter {
+				return &apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{}
+			})
+		if err != nil {
+			return nil, err
+		}
+		apiTask.Inputs.Parameters = apiInputParams
+	}
+
+	// Convert OutputParameters to API outputs field
+	if modelTask.OutputParameters != nil {
+		apiOutputParams, err := model.JSONSliceToProtoSlice(
+			modelTask.OutputParameters,
+			func() *apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter {
+				return &apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{}
+			})
+		if err != nil {
+			return nil, err
+		}
+		apiTask.Outputs.Parameters = apiOutputParams
+	}
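+
+	// An illustrative example of the grouping below (hypothetical values): three
+	// Metric artifacts that all arrive under ArtifactKey "metrics" from producer
+	// "train" collapse into one IOArtifact whose Artifacts slice has three entries,
+	// while two Dataset artifacts sharing a key stay as two separate IOArtifacts.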
+
+	// Populate artifacts from hydrated fields on the model task with a shared converter
+	convertHydrated := func(in []model.TaskArtifactHydrated) ([]*apiv2beta1.PipelineTaskDetail_InputOutputs_IOArtifact, error) {
+		if len(in) == 0 {
+			return nil, nil
+		}
+
+		// Group artifacts by (ArtifactKey, Type, Producer) to consolidate metrics
+		type groupKey struct {
+			artifactKey  string
+			ioType       apiv2beta1.IOType
+			producerTask string
+			hasIteration bool
+			iterationVal int64
+		}
+
+		makeKey := func(h model.TaskArtifactHydrated) groupKey {
+			key := groupKey{
+				artifactKey: h.Key,
+				ioType:      h.Type,
+			}
+			if h.Producer != nil {
+				key.producerTask = h.Producer.TaskName
+				if h.Producer.Iteration != nil {
+					key.hasIteration = true
+					key.iterationVal = *h.Producer.Iteration
+				}
+			}
+			return key
+		}
+
+		grouped := make(map[groupKey][]model.TaskArtifactHydrated)
+		for _, h := range in {
+			key := makeKey(h)
+			grouped[key] = append(grouped[key], h)
+		}
+
+		// Convert grouped artifacts to IOArtifacts
+		out := make([]*apiv2beta1.PipelineTaskDetail_InputOutputs_IOArtifact, 0, len(grouped))
+		for _, hydratedGroup := range grouped {
+			// Check if all artifacts in this group are metrics
+			allMetrics := true
+			for _, h := range hydratedGroup {
+				if h.Value == nil || h.Value.Type != model.ArtifactType(apiv2beta1.Artifact_Metric) {
+					allMetrics = false
+					break
+				}
+			}
+
+			if allMetrics && len(hydratedGroup) > 1 {
+				// Multiple metrics with the same key - consolidate into ONE IOArtifact with multiple artifacts
+				apiArtifacts := make([]*apiv2beta1.Artifact, 0, len(hydratedGroup))
+				for _, h := range hydratedGroup {
+					if h.Value != nil {
+						apiArt, err := toAPIArtifact(h.Value)
+						if err != nil {
+							return nil, err
+						}
+						apiArtifacts = append(apiArtifacts, apiArt)
+					}
+				}
+
+				// Use the first hydrated entry for common fields
+				firstHydrated := hydratedGroup[0]
+				ioArtifact := &apiv2beta1.PipelineTaskDetail_InputOutputs_IOArtifact{
+					Artifacts:   apiArtifacts,
+					ArtifactKey: firstHydrated.Key,
+					Type:        firstHydrated.Type,
+				}
+				if firstHydrated.Producer != nil {
+					ioArtifact.Producer = &apiv2beta1.IOProducer{
+						TaskName:  firstHydrated.Producer.TaskName,
+						Iteration: firstHydrated.Producer.Iteration,
+					}
+				}
+				out = append(out, ioArtifact)
+			} else {
+				// Non-metrics or a single artifact - one IOArtifact per artifact
+				for _, h := range hydratedGroup {
+					var apiArt *apiv2beta1.Artifact
+					if h.Value != nil {
+						apiArtConv, err := toAPIArtifact(h.Value)
+						if err != nil {
+							return nil, err
+						}
+						apiArt = apiArtConv
+					}
+					ioArtifact := &apiv2beta1.PipelineTaskDetail_InputOutputs_IOArtifact{
+						Artifacts:   []*apiv2beta1.Artifact{apiArt},
+						ArtifactKey: h.Key,
+						Type:        h.Type,
+					}
+					if h.Producer != nil {
+						ioArtifact.Producer = &apiv2beta1.IOProducer{
+							TaskName:  h.Producer.TaskName,
+							Iteration: h.Producer.Iteration,
+						}
+					}
+					out = append(out, ioArtifact)
+				}
+			}
+		}
+		return out, nil
+	}
+	if arts, err := convertHydrated(modelTask.InputArtifactsHydrated); err != nil {
+		return nil, err
+	} else if len(arts) > 0 {
+		apiTask.Inputs.Artifacts = arts
+	}
+	if arts, err := convertHydrated(modelTask.OutputArtifactsHydrated); err != nil {
+		return nil, err
+	} else if len(arts) > 0 {
+		apiTask.Outputs.Artifacts = arts
+	}
+
+	// Convert TypeAttrs back to the API TypeAttributes message
+	if modelTask.TypeAttrs != nil {
+		apiTypeAttrs, err := model.JSONDataToProtoMessage(
+			modelTask.TypeAttrs,
+			func() *apiv2beta1.PipelineTaskDetail_TypeAttributes {
+				return &apiv2beta1.PipelineTaskDetail_TypeAttributes{}
+			})
+		if err != nil {
+			return nil, err
+		}
+		apiTask.TypeAttributes = apiTypeAttrs
+	}
+
+	// Convert child tasks
+	apiChildTasks := make([]*apiv2beta1.PipelineTaskDetail_ChildTask, 0)
+	for _, childTask := range childTasks {
+		apiChildTask := &apiv2beta1.PipelineTaskDetail_ChildTask{
+			TaskId: childTask.UUID,
+			Name:   childTask.Name,
+		}
+		apiChildTasks = append(apiChildTasks, apiChildTask)
+	}
+	if len(apiChildTasks) > 0 {
+		apiTask.ChildTasks = apiChildTasks
+	}
+
+	// Convert scope_path from JSONSlice to []string
+	if modelTask.ScopePath != nil {
+		scopePath := make([]string, 0, len(modelTask.ScopePath))
+		for _, path := range modelTask.ScopePath {
+			if pathStr, ok := path.(string); ok {
+				scopePath = append(scopePath, pathStr)
+			}
+		}
+		apiTask.ScopePath = scopePath
+	}
+
+	return apiTask, nil
+}
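The parent/child wiring done by generateAPITasks above is worth pinning down with a test. A minimal sketch of such a test (not part of this change), assuming it lives in package server next to the converters and that the generated ChildTask message exposes a GetTaskId getter:

	func TestGenerateAPITasks_ChildLinks(t *testing.T) {
		parentID := "parent-1"
		tasks := []*model.Task{
			{UUID: parentID, RunUUID: "run-1", Name: "dag"},
			{UUID: "child-1", RunUUID: "run-1", Name: "step-a", ParentTaskUUID: &parentID},
		}

		apiTasks, err := generateAPITasks(tasks)
		assert.NoError(t, err)
		assert.Len(t, apiTasks, 2)

		// The parent entry should list the child in ChildTasks; the child itself
		// carries ParentTaskId instead.
		assert.Len(t, apiTasks[0].GetChildTasks(), 1)
		assert.Equal(t, "child-1", apiTasks[0].GetChildTasks()[0].GetTaskId())
	}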
diff --git a/backend/src/apiserver/server/api_converter_metrics_test.go b/backend/src/apiserver/server/api_converter_metrics_test.go
new file mode 100644
index 00000000000..069d8428fe4
--- /dev/null
+++ b/backend/src/apiserver/server/api_converter_metrics_test.go
@@ -0,0 +1,289 @@
+// Copyright 2025 The Kubeflow Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
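+
+// The tests below pin down the metric-grouping contract of toAPITask: metric
+// artifacts that share an ArtifactKey, IOType, and producer (task name plus
+// optional iteration) collapse into a single IOArtifact, while differing
+// producers, iterations, or non-metric types keep one IOArtifact per artifact.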
+ +package server + +import ( + "testing" + + apiv2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" + "github.com/kubeflow/pipelines/backend/src/apiserver/model" + "github.com/stretchr/testify/assert" +) + +// TestToApiTask_MetricsGrouping tests that multiple metric artifacts with the same +// ArtifactKey, Type, and Producer are consolidated into a single IOArtifact +func TestToApiTask_MetricsGrouping(t *testing.T) { + // Create multiple metric artifacts with same key + accuracy := 0.95 + precision := 0.87 + recall := 0.91 + + accuracyMetadata := model.JSONData{ + "accuracy": accuracy, + } + precisionMetadata := model.JSONData{ + "precision": precision, + } + recallMetadata := model.JSONData{ + "recall": recall, + } + + modelTask := &model.Task{ + UUID: "task-123", + RunUUID: "run-456", + Name: "metrics-task", + Namespace: "ns1", + OutputArtifactsHydrated: []model.TaskArtifactHydrated{ + { + Key: "metrics", + Type: apiv2beta1.IOType_OUTPUT, + Value: &model.Artifact{ + UUID: "artifact-1", + Name: "accuracy", + Type: model.ArtifactType(apiv2beta1.Artifact_Metric), + NumberValue: &accuracy, + Metadata: accuracyMetadata, + }, + Producer: &model.IOProducer{ + TaskName: "metrics-task", + }, + }, + { + Key: "metrics", + Type: apiv2beta1.IOType_OUTPUT, + Value: &model.Artifact{ + UUID: "artifact-2", + Name: "precision", + Type: model.ArtifactType(apiv2beta1.Artifact_Metric), + NumberValue: &precision, + Metadata: precisionMetadata, + }, + Producer: &model.IOProducer{ + TaskName: "metrics-task", + }, + }, + { + Key: "metrics", + Type: apiv2beta1.IOType_OUTPUT, + Value: &model.Artifact{ + UUID: "artifact-3", + Name: "recall", + Type: model.ArtifactType(apiv2beta1.Artifact_Metric), + NumberValue: &recall, + Metadata: recallMetadata, + }, + Producer: &model.IOProducer{ + TaskName: "metrics-task", + }, + }, + }, + } + + apiTask, err := toAPITask(modelTask, nil) + assert.NoError(t, err) + assert.NotNil(t, apiTask) + + // Verify we have ONE IOArtifact for all three metrics + assert.Equal(t, 1, len(apiTask.Outputs.Artifacts), "Should have exactly one IOArtifact for grouped metrics") + + ioArtifact := apiTask.Outputs.Artifacts[0] + assert.Equal(t, "metrics", ioArtifact.ArtifactKey) + assert.Equal(t, apiv2beta1.IOType_OUTPUT, ioArtifact.Type) + + // Verify all three metric artifacts are in the same IOArtifact + assert.Equal(t, 3, len(ioArtifact.Artifacts), "Should have all three metric artifacts in one IOArtifact") + + // Verify each artifact is present + artifactNames := make(map[string]bool) + for _, artifact := range ioArtifact.Artifacts { + artifactNames[artifact.Name] = true + assert.Equal(t, apiv2beta1.Artifact_Metric, artifact.Type) + } + + assert.True(t, artifactNames["accuracy"], "Should contain accuracy metric") + assert.True(t, artifactNames["precision"], "Should contain precision metric") + assert.True(t, artifactNames["recall"], "Should contain recall metric") +} + +// TestToApiTask_MetricsGrouping_DifferentProducers tests that metrics from different +// producers are NOT grouped together +func TestToApiTask_MetricsGrouping_DifferentProducers(t *testing.T) { + accuracy := 0.95 + precision := 0.87 + + modelTask := &model.Task{ + UUID: "task-123", + RunUUID: "run-456", + Name: "metrics-task", + Namespace: "ns1", + OutputArtifactsHydrated: []model.TaskArtifactHydrated{ + { + Key: "metrics", + Type: apiv2beta1.IOType_OUTPUT, + Value: &model.Artifact{ + UUID: "artifact-1", + Name: "accuracy", + Type: model.ArtifactType(apiv2beta1.Artifact_Metric), + NumberValue: &accuracy, + }, + 
Producer: &model.IOProducer{ + TaskName: "task-a", + }, + }, + { + Key: "metrics", + Type: apiv2beta1.IOType_OUTPUT, + Value: &model.Artifact{ + UUID: "artifact-2", + Name: "precision", + Type: model.ArtifactType(apiv2beta1.Artifact_Metric), + NumberValue: &precision, + }, + Producer: &model.IOProducer{ + TaskName: "task-b", + }, + }, + }, + } + + apiTask, err := toAPITask(modelTask, nil) + assert.NoError(t, err) + assert.NotNil(t, apiTask) + + // Verify we have TWO IOArtifacts (different producers) + assert.Equal(t, 2, len(apiTask.Outputs.Artifacts), "Should have two IOArtifacts for different producers") + + // Each IOArtifact should have one artifact + for _, ioArtifact := range apiTask.Outputs.Artifacts { + assert.Equal(t, 1, len(ioArtifact.Artifacts), "Each IOArtifact should have one artifact") + } +} + +// TestToApiTask_MetricsGrouping_WithIterations tests that metrics from different +// loop iterations are NOT grouped together +func TestToApiTask_MetricsGrouping_WithIterations(t *testing.T) { + accuracy1 := 0.95 + accuracy2 := 0.87 + iter0 := int64(0) + iter1 := int64(1) + + modelTask := &model.Task{ + UUID: "task-123", + RunUUID: "run-456", + Name: "loop-task", + Namespace: "ns1", + OutputArtifactsHydrated: []model.TaskArtifactHydrated{ + { + Key: "metrics", + Type: apiv2beta1.IOType_ITERATOR_OUTPUT, + Value: &model.Artifact{ + UUID: "artifact-1", + Name: "accuracy", + Type: model.ArtifactType(apiv2beta1.Artifact_Metric), + NumberValue: &accuracy1, + }, + Producer: &model.IOProducer{ + TaskName: "loop-task", + Iteration: &iter0, + }, + }, + { + Key: "metrics", + Type: apiv2beta1.IOType_ITERATOR_OUTPUT, + Value: &model.Artifact{ + UUID: "artifact-2", + Name: "accuracy", + Type: model.ArtifactType(apiv2beta1.Artifact_Metric), + NumberValue: &accuracy2, + }, + Producer: &model.IOProducer{ + TaskName: "loop-task", + Iteration: &iter1, + }, + }, + }, + } + + apiTask, err := toAPITask(modelTask, nil) + assert.NoError(t, err) + assert.NotNil(t, apiTask) + + // Verify we have TWO IOArtifacts (different iterations) + assert.Equal(t, 2, len(apiTask.Outputs.Artifacts), "Should have two IOArtifacts for different iterations") + + // Verify iterations are correct + iterations := make(map[int64]bool) + for _, ioArtifact := range apiTask.Outputs.Artifacts { + assert.NotNil(t, ioArtifact.Producer) + assert.NotNil(t, ioArtifact.Producer.Iteration) + iterations[*ioArtifact.Producer.Iteration] = true + } + + assert.True(t, iterations[0], "Should have iteration 0") + assert.True(t, iterations[1], "Should have iteration 1") +} + +// TestToApiTask_NonMetrics tests that non-metric artifacts are not grouped +func TestToApiTask_NonMetrics(t *testing.T) { + uri1 := "s3://bucket/dataset1" + uri2 := "s3://bucket/dataset2" + + modelTask := &model.Task{ + UUID: "task-123", + RunUUID: "run-456", + Name: "data-task", + Namespace: "ns1", + OutputArtifactsHydrated: []model.TaskArtifactHydrated{ + { + Key: "datasets", + Type: apiv2beta1.IOType_OUTPUT, + Value: &model.Artifact{ + UUID: "artifact-1", + Name: "dataset1", + Type: model.ArtifactType(apiv2beta1.Artifact_Dataset), + URI: &uri1, + }, + Producer: &model.IOProducer{ + TaskName: "data-task", + }, + }, + { + Key: "datasets", + Type: apiv2beta1.IOType_OUTPUT, + Value: &model.Artifact{ + UUID: "artifact-2", + Name: "dataset2", + Type: model.ArtifactType(apiv2beta1.Artifact_Dataset), + URI: &uri2, + }, + Producer: &model.IOProducer{ + TaskName: "data-task", + }, + }, + }, + } + + apiTask, err := toAPITask(modelTask, nil) + assert.NoError(t, err) + assert.NotNil(t, 
apiTask) + + // Verify we have TWO IOArtifacts (non-metrics are not grouped) + assert.Equal(t, 2, len(apiTask.Outputs.Artifacts), "Non-metric artifacts should not be grouped") + + // Each IOArtifact should have one artifact + for _, ioArtifact := range apiTask.Outputs.Artifacts { + assert.Equal(t, 1, len(ioArtifact.Artifacts), "Each IOArtifact should have one artifact") + } +} diff --git a/backend/src/apiserver/server/api_converter_test.go b/backend/src/apiserver/server/api_converter_test.go index bd1635904fc..75da4d974bc 100644 --- a/backend/src/apiserver/server/api_converter_test.go +++ b/backend/src/apiserver/server/api_converter_test.go @@ -806,9 +806,9 @@ func TestToModelRunMetric(t *testing.T) { Format: apiv1beta1.RunMetric_RAW, } - actualModelRunMetric, err := toModelRunMetric(apiRunMetric, "run-1") + actualModelRunMetric, err := toModelRunMetricV1(apiRunMetric, "run-1") assert.Nil(t, err) - expectedModelRunMetric := &model.RunMetric{ + expectedModelRunMetric := &model.RunMetricV1{ RunUUID: "run-1", Name: "metric-1", NodeID: "node-1", @@ -826,9 +826,9 @@ func TestToModelRunMetric(t *testing.T) { Value: &apiv1beta1.RunMetric_NumberValue{NumberValue: 0.88}, Format: apiv1beta1.RunMetric_RAW, } - _, err := toModelRunMetric(apiRunMetric, "run-1") + _, err := toModelRunMetricV1(apiRunMetric, "run-1") assert.NotNil(t, err) - assert.Contains(t, err.Error(), "RunMetric.Name length cannot exceed 191") + assert.Contains(t, err.Error(), "RunMetricV1.Name length cannot exceed 191") } // Test NodeID length overflow @@ -840,9 +840,9 @@ func TestToModelRunMetric(t *testing.T) { Value: &apiv1beta1.RunMetric_NumberValue{NumberValue: 0.88}, Format: apiv1beta1.RunMetric_RAW, } - _, err := toModelRunMetric(apiRunMetric, "run-1") + _, err := toModelRunMetricV1(apiRunMetric, "run-1") assert.NotNil(t, err) - assert.Contains(t, err.Error(), "RunMetric.NodeID length cannot exceed 191") + assert.Contains(t, err.Error(), "RunMetricV1.NodeID length cannot exceed 191") } } @@ -1443,13 +1443,13 @@ func TestToApiRunDetailV1_V1Params(t *testing.T) { } func TestToApiRunsV1(t *testing.T) { - metric1 := &model.RunMetric{ + metric1 := &model.RunMetricV1{ Name: "metric-1", NodeID: "node-1", NumberValue: 0.88, Format: "RAW", } - metric2 := &model.RunMetric{ + metric2 := &model.RunMetricV1{ Name: "metric-2", NodeID: "node-2", NumberValue: 0.99, @@ -1482,7 +1482,7 @@ func TestToApiRunsV1(t *testing.T) { WorkflowSpecManifest: "manifest", }, RecurringRunId: "job1", - Metrics: []*model.RunMetric{metric1, metric2}, + Metrics: []*model.RunMetricV1{metric1, metric2}, } modelRun2 := model.Run{ UUID: "run2", @@ -1499,7 +1499,7 @@ func TestToApiRunsV1(t *testing.T) { WorkflowSpecManifest: "manifest", }, RecurringRunId: "job2", - Metrics: []*model.RunMetric{metric2}, + Metrics: []*model.RunMetricV1{metric2}, } apiRuns := toApiRunsV1([]*model.Run{&modelRun1, &modelRun2}) expectedApiRun := []*apiv1beta1.Run{ @@ -1553,80 +1553,6 @@ func TestToApiRunsV1(t *testing.T) { assert.Equal(t, expectedApiRun, apiRuns) } -func TestToApiTask(t *testing.T) { - modelTask := &model.Task{ - UUID: DefaultFakeUUID, - Namespace: "", - PipelineName: "pipeline/my-pipeline", - RunID: NonDefaultFakeUUID, - MLMDExecutionID: "1", - CreatedTimestamp: 1, - FinishedTimestamp: 2, - Fingerprint: "123", - } - apiTask := toApiTaskV1(modelTask) - expectedApiTask := &apiv1beta1.Task{ - Id: DefaultFakeUUID, - Namespace: "", - PipelineName: "pipeline/my-pipeline", - RunId: NonDefaultFakeUUID, - MlmdExecutionID: "1", - CreatedAt: timestamppb.New(time.Unix(1, 0)), - 
FinishedAt: timestamppb.New(time.Unix(2, 0)),
-		Fingerprint: "123",
-	}
-
-	assert.Equal(t, expectedApiTask, apiTask)
-}
-
-func TestToApiTasks(t *testing.T) {
-	modelTask1 := model.Task{
-		UUID:              "123e4567-e89b-12d3-a456-426655440000",
-		Namespace:         "ns1",
-		PipelineName:      "namespace/ns1/pipeline/my-pipeline-1",
-		RunID:             "123e4567-e89b-12d3-a456-426655440001",
-		MLMDExecutionID:   "1",
-		CreatedTimestamp:  1,
-		FinishedTimestamp: 2,
-		Fingerprint:       "123",
-	}
-	modelTask2 := model.Task{
-		UUID:              "123e4567-e89b-12d3-a456-426655440002",
-		Namespace:         "ns2",
-		PipelineName:      "namespace/ns1/pipeline/my-pipeline-2",
-		RunID:             "123e4567-e89b-12d3-a456-426655440003",
-		MLMDExecutionID:   "2",
-		CreatedTimestamp:  3,
-		FinishedTimestamp: 4,
-		Fingerprint:       "124",
-	}
-
-	apiTasks := toApiTasksV1([]*model.Task{&modelTask1, &modelTask2})
-	expectedApiTasks := []*apiv1beta1.Task{
-		{
-			Id:              "123e4567-e89b-12d3-a456-426655440000",
-			Namespace:       "ns1",
-			PipelineName:    "namespace/ns1/pipeline/my-pipeline-1",
-			RunId:           "123e4567-e89b-12d3-a456-426655440001",
-			MlmdExecutionID: "1",
-			CreatedAt:       timestamppb.New(time.Unix(1, 0)),
-			FinishedAt:      timestamppb.New(time.Unix(2, 0)),
-			Fingerprint:     "123",
-		},
-		{
-			Id:              "123e4567-e89b-12d3-a456-426655440002",
-			Namespace:       "ns2",
-			PipelineName:    "namespace/ns1/pipeline/my-pipeline-2",
-			RunId:           "123e4567-e89b-12d3-a456-426655440003",
-			MlmdExecutionID: "2",
-			CreatedAt:       &timestamppb.Timestamp{Seconds: 3, Nanos: 0},
-			FinishedAt:      &timestamppb.Timestamp{Seconds: 4, Nanos: 0},
-			Fingerprint:     "124",
-		},
-	}
-	assert.Equal(t, expectedApiTasks, apiTasks)
-}
-
 func TestCronScheduledJobtoApiJob(t *testing.T) {
 	modelJob := model.Job{
 		UUID: "job1",
@@ -1986,14 +1912,14 @@ func TestToApiJobs(t *testing.T) {
 }
 
 func TestToApiRunMetric(t *testing.T) {
-	modelRunMetric := &model.RunMetric{
+	modelRunMetric := &model.RunMetricV1{
 		Name:        "metric-1",
 		NodeID:      "node-1",
 		NumberValue: 0.88,
 		Format:      "RAW",
 	}
 
-	actualAPIRunMetric := toApiRunMetricV1(modelRunMetric)
+	actualAPIRunMetric := toAPIRunMetricV1(modelRunMetric)
 
 	expectedAPIRunMetric := &apiv1beta1.RunMetric{
 		Name: "metric-1",
@@ -2008,14 +1934,14 @@ func TestToApiRunMetric(t *testing.T) {
 
 func TestToApiRunMetric_UnknownFormat(t *testing.T) {
 	// This can happen if we accidentally remove an existing format value from proto.
- modelRunMetric := &model.RunMetric{ + modelRunMetric := &model.RunMetricV1{ Name: "metric-1", NodeID: "node-1", NumberValue: 0.88, Format: "NotExistValue", } - actualAPIRunMetric := toApiRunMetricV1(modelRunMetric) + actualAPIRunMetric := toAPIRunMetricV1(modelRunMetric) expectedAPIRunMetric := &apiv1beta1.RunMetric{ Name: "metric-1", @@ -3106,748 +3032,6 @@ func Test_toApiRuntimeStatuses(t *testing.T) { assert.Equal(t, expected, got) } -func Test_toModelTask(t *testing.T) { - tests := []struct { - name string - apiTask interface{} - want *model.Task - wantErr bool - errMsg string - }{ - { - "V1 full spec", - &apiv1beta1.Task{ - Id: "1", - Namespace: "ns1", - PipelineName: "namespaces/ns1/pipelines/p1", - RunId: "2", - MlmdExecutionID: "3", - CreatedAt: ×tamppb.Timestamp{Seconds: 4}, - FinishedAt: ×tamppb.Timestamp{Seconds: 5}, - Fingerprint: "6", - }, - &model.Task{ - UUID: "1", - Namespace: "ns1", - PipelineName: "namespaces/ns1/pipelines/p1", - RunID: "2", - MLMDExecutionID: "3", - CreatedTimestamp: 4, - StartedTimestamp: 4, - FinishedTimestamp: 5, - Fingerprint: "6", - Name: "", - ParentTaskId: "", - State: model.RuntimeStateUnspecified, - StateHistory: nil, - MLMDInputs: "", - MLMDOutputs: "", - ChildrenPods: nil, - }, - false, - "", - }, - { - "V2 full spec", - &apiv2beta1.PipelineTaskDetail{ - RunId: "2", - TaskId: "1", - DisplayName: "task", - CreateTime: ×tamppb.Timestamp{Seconds: 4}, - StartTime: ×tamppb.Timestamp{Seconds: 5}, - EndTime: ×tamppb.Timestamp{Seconds: 6}, - State: apiv2beta1.RuntimeState_CANCELING, - ExecutionId: 7, - Inputs: map[string]*apiv2beta1.ArtifactList{ - "a1": { - ArtifactIds: []int64{1, 2, 3}, - }, - }, - Outputs: map[string]*apiv2beta1.ArtifactList{ - "b2": { - ArtifactIds: []int64{4, 5, 6}, - }, - }, - ParentTaskId: "8", - StateHistory: []*apiv2beta1.RuntimeStatus{ - { - UpdateTime: ×tamppb.Timestamp{Seconds: 9}, - State: apiv2beta1.RuntimeState_PAUSED, - }, - }, - ChildTasks: []*apiv2beta1.PipelineTaskDetail_ChildTask{ - { - ChildTask: &apiv2beta1.PipelineTaskDetail_ChildTask_PodName{PodName: "9"}, - }, - { - ChildTask: &apiv2beta1.PipelineTaskDetail_ChildTask_PodName{PodName: "10"}, - }, - }, - }, - &model.Task{ - UUID: "1", - Namespace: "", - PipelineName: "", - RunID: "2", - MLMDExecutionID: "7", - CreatedTimestamp: 4, - StartedTimestamp: 5, - FinishedTimestamp: 6, - Fingerprint: "", - Name: "task", - ParentTaskId: "8", - State: model.RuntimeStateCancelling, - StateHistory: []*model.RuntimeStatus{ - { - UpdateTimeInSec: 9, - State: model.RuntimeStatePaused, - }, - }, - MLMDInputs: `{"a1":{"artifact_ids":[1,2,3]}}`, - MLMDOutputs: `{"b2":{"artifact_ids":[4,5,6]}}`, - ChildrenPods: []string{"9", "10"}, - }, - false, - "", - }, - { - "argo node status", - util.NodeStatus{ - ID: "1", - DisplayName: "node_1", - State: "Pending", - Children: []string{"node3", "node4"}, - StartTime: 4, - CreateTime: 4, - FinishTime: 5, - }, - &model.Task{ - PodName: "1", - CreatedTimestamp: 4, - StartedTimestamp: 4, - FinishedTimestamp: 5, - Name: "node_1", - State: model.RuntimeStatePending, - ChildrenPods: []string{"node3", "node4"}, - }, - false, - "", - }, - { - "invalid type", - apiv2beta1.Run{}, - nil, - true, - "UnknownApiVersionError: Error using Task with go_client.Run", - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - got, err := toModelTask(tt.apiTask) - if tt.wantErr { - assert.NotNil(t, err) - assert.Contains(t, err.Error(), tt.errMsg) - assert.Nil(t, got) - } else { - assert.Nil(t, err) - assert.Equal(t, tt.want, got) - } - }) - 
} -} - -func Test_toModelTasks_v2(t *testing.T) { - argV2 := []*apiv2beta1.PipelineTaskDetail{ - { - RunId: "2", - TaskId: "1", - DisplayName: "task", - CreateTime: ×tamppb.Timestamp{Seconds: 4}, - StartTime: ×tamppb.Timestamp{Seconds: 5}, - EndTime: ×tamppb.Timestamp{Seconds: 6}, - State: apiv2beta1.RuntimeState_FAILED, - ExecutionId: 7, - Inputs: map[string]*apiv2beta1.ArtifactList{ - "a1": { - ArtifactIds: []int64{1, 2, 3}, - }, - }, - Outputs: map[string]*apiv2beta1.ArtifactList{ - "b2": { - ArtifactIds: []int64{4, 5, 6}, - }, - }, - ParentTaskId: "8", - StateHistory: []*apiv2beta1.RuntimeStatus{ - { - UpdateTime: ×tamppb.Timestamp{Seconds: 9}, - State: apiv2beta1.RuntimeState_FAILED, - Error: util.ToRpcStatus(util.NewInvalidInputError("Input argument is invalid")), - }, - }, - ChildTasks: []*apiv2beta1.PipelineTaskDetail_ChildTask{ - { - ChildTask: &apiv2beta1.PipelineTaskDetail_ChildTask_PodName{PodName: "9"}, - }, - { - ChildTask: &apiv2beta1.PipelineTaskDetail_ChildTask_PodName{PodName: "10"}, - }, - }, - }, - } - expectedV2 := []*model.Task{ - { - UUID: "1", - Namespace: "", - PipelineName: "", - RunID: "2", - MLMDExecutionID: "7", - CreatedTimestamp: 4, - StartedTimestamp: 5, - FinishedTimestamp: 6, - Fingerprint: "", - Name: "task", - ParentTaskId: "8", - State: model.RuntimeStateFailed, - StateHistory: []*model.RuntimeStatus{ - { - UpdateTimeInSec: 9, - State: model.RuntimeStateFailed, - Error: util.ToError(util.ToRpcStatus(util.NewInvalidInputError("Input argument is invalid"))), - }, - }, - MLMDInputs: `{"a1":{"artifact_ids":[1,2,3]}}`, - MLMDOutputs: `{"b2":{"artifact_ids":[4,5,6]}}`, - ChildrenPods: []string{"9", "10"}, - }, - } - gotV2, err := toModelTasks(argV2) - assert.Nil(t, err) - assert.Equal(t, expectedV2, gotV2) -} - -func Test_toModelTasks_wf(t *testing.T) { - expectedWf := []*model.Task{ - { - PodName: "run1-file-passing-pipelines-node0", - Namespace: "kubeflow", - RunID: "run1_uid_true", - CreatedTimestamp: -62135596800, - StartedTimestamp: 1675734919, - FinishedTimestamp: 1675735118, - Name: "boudary_exec_id", - State: model.RuntimeStateSucceeded, - ChildrenPods: []string{"boudary_exec_id-node1"}, - }, - { - PodName: "run1-print-text-node1", - Namespace: "kubeflow", - RunID: "run1_uid_true", - CreatedTimestamp: -62135596800, - StartedTimestamp: 1675735015, - FinishedTimestamp: 1675735041, - Name: "print-text", - State: model.RuntimeStateSucceeded, - }, - } - argWf, err := util.NewWorkflowFromBytes([]byte(`{ "kind": "Workflow", "apiVersion": "argoproj.io/v1alpha1", "metadata": { "name": "run1", "namespace": "kubeflow", "uid": "run1_uid", "labels": { "pipeline/runid": "run1_uid_true" } }, "status": { "phase": "Succeeded", "startedAt": "2023-02-07T01:55:19Z", "finishedAt": "2023-02-07T01:58:38Z", "progress": "9/9", "nodes": { "boudary_exec_id-node0": { "id": "boudary_exec_id-node0", "name": "boudary_exec_id", "displayName": "boudary_exec_id", "type": "DAG", "templateName": "file-passing-pipelines", "templateScope": "local/boudary_exec_id", "phase": "Succeeded", "startedAt": "2023-02-07T01:55:19Z", "finishedAt": "2023-02-07T01:58:38Z", "progress": "9/9", "resourcesDuration": {"cpu": 53,"memory": 19}, "children": ["boudary_exec_id-node1"], "outboundNodes": ["boudary_exec_id-node1"] }, "boudary_exec_id-node1": { "id": "boudary_exec_id-node1", "name": "boudary_exec_id.print-text", "displayName": "print-text", "type": "Pod", "templateName": "print-text", "templateScope": "local/boudary_exec_id", "phase": "Succeeded", "boundaryID": "boudary_exec_id", "startedAt": 
"2023-02-07T01:56:55Z", "finishedAt": "2023-02-07T01:57:21Z", "progress": "1/1", "resourcesDuration": {"cpu": 15,"memory": 7}, "inputs": {"artifacts": [{"name": "repeat-line-output_text", "path": "/tmp/inputs/text/data", "s3": {"key": "art1.tgz"}}]}, "outputs": {"artifacts": [{"name": "main-logs", "s3": {"key": "art1.log"}}], "exitCode": "0"}, "hostNodeName": "gke-kfp-node1" } } }}`)) - assert.Nil(t, err) - - gotWf, err := toModelTasks(argWf) - assert.Nil(t, err) - if !cmp.Equal(expectedWf, gotWf) { - t.Errorf("toModelTasks() diff: %v", cmp.Diff(gotWf, expectedWf)) - } -} - -func Test_toApiTaskV1(t *testing.T) { - tests := []struct { - name string - args *model.Task - want *apiv1beta1.Task - }{ - { - "v1 spec", - &model.Task{ - UUID: "1", - Namespace: "ns1", - PipelineName: "namespaces/ns1/pipelines/p1", - RunID: "2", - MLMDExecutionID: "3", - CreatedTimestamp: 4, - StartedTimestamp: 4, - FinishedTimestamp: 5, - Fingerprint: "6", - Name: "", - ParentTaskId: "", - State: model.RuntimeStateUnspecified, - StateHistory: nil, - MLMDInputs: "", - MLMDOutputs: "", - ChildrenPods: nil, - }, - &apiv1beta1.Task{ - Id: "1", - Namespace: "ns1", - PipelineName: "namespaces/ns1/pipelines/p1", - RunId: "2", - MlmdExecutionID: "3", - CreatedAt: ×tamppb.Timestamp{Seconds: 4}, - FinishedAt: ×tamppb.Timestamp{Seconds: 5}, - Fingerprint: "6", - }, - }, - { - "v2 spec", - &model.Task{ - UUID: "1", - Namespace: "ns1", - PipelineName: "namespaces/ns1/pipelines/p1", - RunID: "2", - MLMDExecutionID: "7", - CreatedTimestamp: 4, - StartedTimestamp: 5, - FinishedTimestamp: 6, - Fingerprint: "fp", - Name: "task", - ParentTaskId: "8", - State: model.RuntimeStateCancelling, - StateHistory: []*model.RuntimeStatus{ - { - UpdateTimeInSec: 9, - State: model.RuntimeStatePaused, - Error: util.ToError(util.ToRpcStatus(util.NewInvalidInputError("Sample error2"))), - }, - }, - MLMDInputs: `{"a1":{"artifact_ids":[1,2,3]}}`, - MLMDOutputs: `{"b2":{"artifact_ids":[4,5,6]}}`, - ChildrenPods: []string{"9", "10"}, - }, - &apiv1beta1.Task{ - Id: "1", - Namespace: "ns1", - PipelineName: "namespaces/ns1/pipelines/p1", - RunId: "2", - MlmdExecutionID: "7", - CreatedAt: ×tamppb.Timestamp{Seconds: 4}, - FinishedAt: ×tamppb.Timestamp{Seconds: 6}, - Fingerprint: "fp", - }, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - got := toApiTaskV1(tt.args) - assert.Equal(t, tt.want, got) - }) - } -} - -func Test_toApiTasksV1(t *testing.T) { - arg := []*model.Task{ - { - UUID: "1", - Namespace: "ns1", - PipelineName: "namespaces/ns1/pipelines/p1", - RunID: "2", - MLMDExecutionID: "3", - CreatedTimestamp: 4, - StartedTimestamp: 4, - FinishedTimestamp: 5, - Fingerprint: "6", - Name: "", - ParentTaskId: "", - State: model.RuntimeStateUnspecified, - StateHistory: nil, - MLMDInputs: "", - MLMDOutputs: "", - ChildrenPods: nil, - }, - { - UUID: "1", - Namespace: "ns1", - PipelineName: "namespaces/ns1/pipelines/p1", - RunID: "2", - MLMDExecutionID: "7", - CreatedTimestamp: 4, - StartedTimestamp: 5, - FinishedTimestamp: 6, - Fingerprint: "fp", - Name: "task", - ParentTaskId: "8", - State: model.RuntimeStateCancelling, - StateHistory: []*model.RuntimeStatus{ - { - UpdateTimeInSec: 9, - State: model.RuntimeStatePaused, - Error: util.ToError(util.ToRpcStatus(util.NewInvalidInputError("Sample error2"))), - }, - }, - MLMDInputs: `{"a1":{"artifact_ids":[1,2,3]}}`, - MLMDOutputs: `{"b2":{"artifact_ids":[4,5,6]}}`, - ChildrenPods: []string{"9", "10"}, - }, - } - expected := []*apiv1beta1.Task{ - { - Id: "1", - Namespace: "ns1", - 
PipelineName: "namespaces/ns1/pipelines/p1", - RunId: "2", - MlmdExecutionID: "3", - CreatedAt: ×tamppb.Timestamp{Seconds: 4}, - FinishedAt: ×tamppb.Timestamp{Seconds: 5}, - Fingerprint: "6", - }, - { - Id: "1", - Namespace: "ns1", - PipelineName: "namespaces/ns1/pipelines/p1", - RunId: "2", - MlmdExecutionID: "7", - CreatedAt: ×tamppb.Timestamp{Seconds: 4}, - FinishedAt: ×tamppb.Timestamp{Seconds: 6}, - Fingerprint: "fp", - }, - } - got := toApiTasksV1(arg) - assert.Equal(t, expected, got) -} - -func Test_toApiPipelineTaskDetail(t *testing.T) { - tests := []struct { - name string - args *model.Task - want *apiv2beta1.PipelineTaskDetail - wantErr bool - errMsg string - }{ - { - "v1 spec", - &model.Task{ - UUID: "1", - Namespace: "ns1", - PipelineName: "namespaces/ns1/pipelines/p1", - RunID: "2", - MLMDExecutionID: "3", - CreatedTimestamp: 4, - StartedTimestamp: 4, - FinishedTimestamp: 5, - Fingerprint: "6", - State: model.RuntimeStateUnspecified, - }, - &apiv2beta1.PipelineTaskDetail{ - RunId: "2", - TaskId: "1", - DisplayName: "", - CreateTime: ×tamppb.Timestamp{Seconds: 4}, - StartTime: ×tamppb.Timestamp{Seconds: 4}, - EndTime: ×tamppb.Timestamp{Seconds: 5}, - State: apiv2beta1.RuntimeState_RUNTIME_STATE_UNSPECIFIED, - ExecutionId: 3, - }, - false, - "", - }, - { - "v2 spec", - &model.Task{ - UUID: "1", - Namespace: "ns1", - PipelineName: "namespaces/ns1/pipelines/p1", - RunID: "2", - MLMDExecutionID: "7", - CreatedTimestamp: 4, - StartedTimestamp: 5, - FinishedTimestamp: 6, - Fingerprint: "fp", - Name: "task", - ParentTaskId: "8", - State: model.RuntimeStateCancelling, - StateHistory: []*model.RuntimeStatus{ - { - UpdateTimeInSec: 9, - State: model.RuntimeStatePaused, - Error: util.ToError(util.ToRpcStatus(util.NewInvalidInputError("Sample error2"))), - }, - }, - MLMDInputs: `{"a1":{"artifact_ids":[1,2,3]}}`, - MLMDOutputs: `{"b2":{"artifact_ids":[4,5,6]}}`, - ChildrenPods: []string{"9", "10"}, - }, - &apiv2beta1.PipelineTaskDetail{ - RunId: "2", - TaskId: "1", - DisplayName: "task", - CreateTime: ×tamppb.Timestamp{Seconds: 4}, - StartTime: ×tamppb.Timestamp{Seconds: 5}, - EndTime: ×tamppb.Timestamp{Seconds: 6}, - State: apiv2beta1.RuntimeState_CANCELING, - ExecutionId: 7, - Inputs: map[string]*apiv2beta1.ArtifactList{ - "a1": { - ArtifactIds: []int64{1, 2, 3}, - }, - }, - Outputs: map[string]*apiv2beta1.ArtifactList{ - "b2": { - ArtifactIds: []int64{4, 5, 6}, - }, - }, - ParentTaskId: "8", - StateHistory: []*apiv2beta1.RuntimeStatus{ - { - UpdateTime: ×tamppb.Timestamp{Seconds: 9}, - State: apiv2beta1.RuntimeState_PAUSED, - Error: util.ToRpcStatus(util.NewInvalidInputError("Sample error2")), - }, - }, - ChildTasks: []*apiv2beta1.PipelineTaskDetail_ChildTask{ - { - ChildTask: &apiv2beta1.PipelineTaskDetail_ChildTask_PodName{PodName: "9"}, - }, - { - ChildTask: &apiv2beta1.PipelineTaskDetail_ChildTask_PodName{PodName: "10"}, - }, - }, - }, - false, - "", - }, - { - "v2 wrong inputs", - &model.Task{ - UUID: "1", - Namespace: "ns1", - PipelineName: "namespaces/ns1/pipelines/p1", - RunID: "2", - MLMDExecutionID: "7", - CreatedTimestamp: 4, - StartedTimestamp: 5, - FinishedTimestamp: 6, - Fingerprint: "fp", - Name: "task", - ParentTaskId: "8", - State: model.RuntimeStateCancelling, - StateHistory: []*model.RuntimeStatus{ - { - UpdateTimeInSec: 9, - State: model.RuntimeStatePaused, - Error: util.ToError(util.ToRpcStatus(util.NewInvalidInputError("Sample error2"))), - }, - }, - MLMDInputs: `{"a1":{"artifact_ids":[1,2,3]}`, - MLMDOutputs: `{"b2":{"artifact_ids":[4,5,6]}}`, - ChildrenPods: 
[]string{"9", "10"}, - }, - &apiv2beta1.PipelineTaskDetail{ - RunId: "2", - TaskId: "1", - }, - true, - "Failed to convert task's internal representation to its API counterpart due to error parsing inputs", - }, - { - "v2 wrong outputs", - &model.Task{ - UUID: "1", - Namespace: "ns1", - PipelineName: "namespaces/ns1/pipelines/p1", - RunID: "2", - MLMDExecutionID: "7", - CreatedTimestamp: 4, - StartedTimestamp: 5, - FinishedTimestamp: 6, - Fingerprint: "fp", - Name: "task", - ParentTaskId: "8", - State: model.RuntimeStateCancelling, - StateHistory: []*model.RuntimeStatus{ - { - UpdateTimeInSec: 9, - State: model.RuntimeStatePaused, - Error: util.ToError(util.ToRpcStatus(util.NewInvalidInputError("Sample error2"))), - }, - }, - MLMDInputs: `{"a1":{"artifact_ids":[1,2,3]}}`, - MLMDOutputs: `{"b2":{"artifact_ids":[4,5,6]}`, - ChildrenPods: []string{"9", "10"}, - }, - &apiv2beta1.PipelineTaskDetail{ - RunId: "2", - TaskId: "1", - }, - true, - "Failed to convert task's internal representation to its API counterpart due to error parsing outputs", - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - got := toApiPipelineTaskDetail(tt.args) - if tt.wantErr { - assert.Equal(t, tt.want.GetRunId(), got.GetRunId()) - assert.Equal(t, tt.want.GetTaskId(), got.GetTaskId()) - assert.Contains(t, got.Error.Message, tt.errMsg) - } else { - assert.Equal(t, tt.want, got) - } - }) - } -} - -func Test_toApiPipelineTaskDetails(t *testing.T) { - args := []*model.Task{ - { - UUID: "1", - Namespace: "ns1", - PipelineName: "namespaces/ns1/pipelines/p1", - RunID: "2", - MLMDExecutionID: "3", - CreatedTimestamp: 4, - StartedTimestamp: 4, - FinishedTimestamp: 5, - Fingerprint: "6", - State: model.RuntimeStateUnspecified, - }, - { - UUID: "1", - Namespace: "ns1", - PipelineName: "namespaces/ns1/pipelines/p1", - RunID: "2", - MLMDExecutionID: "7", - CreatedTimestamp: 4, - StartedTimestamp: 5, - FinishedTimestamp: 6, - Fingerprint: "fp", - Name: "task", - ParentTaskId: "8", - State: model.RuntimeStateCancelling, - StateHistory: []*model.RuntimeStatus{ - { - UpdateTimeInSec: 9, - State: model.RuntimeStatePaused, - Error: util.ToError(util.ToRpcStatus(util.NewInvalidInputError("Sample error2"))), - }, - }, - MLMDInputs: `{"a1":{"artifact_ids":[1,2,3]}}`, - MLMDOutputs: `{"b2":{"artifact_ids":[4,5,6]}}`, - ChildrenPods: []string{"9", "10"}, - }, - } - expected := []*apiv2beta1.PipelineTaskDetail{ - { - RunId: "2", - TaskId: "1", - DisplayName: "", - CreateTime: ×tamppb.Timestamp{Seconds: 4}, - StartTime: ×tamppb.Timestamp{Seconds: 4}, - EndTime: ×tamppb.Timestamp{Seconds: 5}, - State: apiv2beta1.RuntimeState_RUNTIME_STATE_UNSPECIFIED, - ExecutionId: 3, - }, - { - RunId: "2", - TaskId: "1", - DisplayName: "task", - CreateTime: ×tamppb.Timestamp{Seconds: 4}, - StartTime: ×tamppb.Timestamp{Seconds: 5}, - EndTime: ×tamppb.Timestamp{Seconds: 6}, - State: apiv2beta1.RuntimeState_CANCELING, - ExecutionId: 7, - Inputs: map[string]*apiv2beta1.ArtifactList{ - "a1": { - ArtifactIds: []int64{1, 2, 3}, - }, - }, - Outputs: map[string]*apiv2beta1.ArtifactList{ - "b2": { - ArtifactIds: []int64{4, 5, 6}, - }, - }, - ParentTaskId: "8", - StateHistory: []*apiv2beta1.RuntimeStatus{ - { - UpdateTime: ×tamppb.Timestamp{Seconds: 9}, - State: apiv2beta1.RuntimeState_PAUSED, - Error: util.ToRpcStatus(util.NewInvalidInputError("Sample error2")), - }, - }, - ChildTasks: []*apiv2beta1.PipelineTaskDetail_ChildTask{ - { - ChildTask: &apiv2beta1.PipelineTaskDetail_ChildTask_PodName{PodName: "9"}, - }, - { - ChildTask: 
&apiv2beta1.PipelineTaskDetail_ChildTask_PodName{PodName: "10"}, - }, - }, - }, - } - got := toApiPipelineTaskDetails(args) - assert.Equal(t, expected, got) - - args2 := []*model.Task{ - { - UUID: "1", - Namespace: "ns1", - PipelineName: "namespaces/ns1/pipelines/p1", - RunID: "2", - MLMDExecutionID: "7", - CreatedTimestamp: 4, - StartedTimestamp: 5, - FinishedTimestamp: 6, - Fingerprint: "fp", - Name: "task", - ParentTaskId: "8", - State: model.RuntimeStateCancelling, - StateHistory: []*model.RuntimeStatus{ - { - UpdateTimeInSec: 9, - State: model.RuntimeStatePaused, - Error: util.ToError(util.ToRpcStatus(util.NewInvalidInputError("Sample error2"))), - }, - }, - MLMDInputs: `{"a1":{"artifact_ids":[1,2,3]}`, - MLMDOutputs: `{"b2":{"artifact_ids":[4,5,6]}}`, - ChildrenPods: []string{"9", "10"}, - }, - { - UUID: "1", - Namespace: "ns1", - PipelineName: "namespaces/ns1/pipelines/p1", - RunID: "2", - MLMDExecutionID: "7", - CreatedTimestamp: 4, - StartedTimestamp: 5, - FinishedTimestamp: 6, - Fingerprint: "fp", - Name: "task", - ParentTaskId: "8", - State: model.RuntimeStateCancelling, - StateHistory: []*model.RuntimeStatus{ - { - UpdateTimeInSec: 9, - State: model.RuntimeStatePaused, - Error: util.ToError(util.ToRpcStatus(util.NewInvalidInputError("Sample error2"))), - }, - }, - MLMDInputs: `{"a1":{"artifact_ids":[1,2,3]}}`, - MLMDOutputs: `{"b2":{"artifact_ids":[4,5,6]}`, - ChildrenPods: []string{"9", "10"}, - }, - } - got2 := toApiPipelineTaskDetails(args2) - assert.Contains(t, got2[0].Error.Message, "Failed to convert task's internal representation to its API counterpart due to error parsing inputs") - assert.Contains(t, got2[1].Error.Message, "Failed to convert task's internal representation to its API counterpart due to error parsing outputs") - expected2 := &apiv2beta1.PipelineTaskDetail{ - RunId: "2", - TaskId: "1", - } - expected2.Error = got2[0].GetError() - assert.Equal(t, expected2, got2[0]) - expected2.Error = got2[1].GetError() - assert.Equal(t, expected2, got2[1]) -} - func TestToModelRun(t *testing.T) { tests := []struct { name string @@ -3884,61 +3068,6 @@ func TestToModelRun(t *testing.T) { RunDetails: &apiv2beta1.RunDetails{ PipelineContextId: 10, PipelineRunContextId: 11, - TaskDetails: []*apiv2beta1.PipelineTaskDetail{ - { - RunId: "run1", - TaskId: "task1", - DisplayName: "this is task", - CreateTime: timestamppb.New(time.Unix(11, 0)), - StartTime: timestamppb.New(time.Unix(12, 0)), - EndTime: timestamppb.New(time.Unix(13, 0)), - ExecutorDetail: nil, - State: apiv2beta1.RuntimeState_FAILED, - ExecutionId: 14, - Inputs: map[string]*apiv2beta1.ArtifactList{ - "a1": {ArtifactIds: []int64{1, 2, 3}}, - }, - Outputs: map[string]*apiv2beta1.ArtifactList{ - "b2": {ArtifactIds: []int64{4, 5, 6}}, - }, - StateHistory: []*apiv2beta1.RuntimeStatus{ - { - UpdateTime: ×tamppb.Timestamp{Seconds: 15}, - State: apiv2beta1.RuntimeState_FAILED, - Error: util.ToRpcStatus(util.NewInvalidInputError("Input argument is invalid")), - }, - }, - ChildTasks: []*apiv2beta1.PipelineTaskDetail_ChildTask{ - { - ChildTask: &apiv2beta1.PipelineTaskDetail_ChildTask_PodName{PodName: "task2"}, - }, - }, - }, - { - RunId: "run1", - TaskId: "task2", - DisplayName: "this is task 2", - CreateTime: timestamppb.New(time.Unix(11, 0)), - StartTime: timestamppb.New(time.Unix(12, 0)), - EndTime: timestamppb.New(time.Unix(13, 0)), - ExecutorDetail: nil, - State: apiv2beta1.RuntimeState_CANCELED, - ExecutionId: 14, - Inputs: map[string]*apiv2beta1.ArtifactList{ - "a1": {ArtifactIds: []int64{1, 2, 3}}, - }, - Outputs: 
map[string]*apiv2beta1.ArtifactList{ - "b2": {ArtifactIds: []int64{4, 5, 6}}, - }, - ParentTaskId: "task1", - StateHistory: []*apiv2beta1.RuntimeStatus{ - { - UpdateTime: ×tamppb.Timestamp{Seconds: 15}, - State: apiv2beta1.RuntimeState_CANCELED, - }, - }, - }, - }, }, RecurringRunId: "job1", StateHistory: []*apiv2beta1.RuntimeStatus{ @@ -3979,56 +3108,8 @@ func TestToModelRun(t *testing.T) { FinishedAtInSec: 3, PipelineContextId: 0, PipelineRunContextId: 0, - TaskDetails: []*model.Task{ - { - UUID: "task1", - Namespace: "", - PipelineName: "", - RunID: "run1", - MLMDExecutionID: "14", - CreatedTimestamp: 11, - StartedTimestamp: 12, - FinishedTimestamp: 13, - Fingerprint: "", - Name: "this is task", - State: model.RuntimeStateFailed, - MLMDInputs: `{"a1":{"artifact_ids":[1,2,3]}}`, - MLMDOutputs: `{"b2":{"artifact_ids":[4,5,6]}}`, - StateHistory: []*model.RuntimeStatus{ - { - UpdateTimeInSec: 15, - State: model.RuntimeStateFailed, - Error: util.ToError(util.ToRpcStatus(util.NewInvalidInputError("Input argument is invalid"))), - }, - }, - ChildrenPods: []string{"task2"}, - }, - { - UUID: "task2", - Namespace: "", - PipelineName: "", - RunID: "run1", - MLMDExecutionID: "14", - CreatedTimestamp: 11, - StartedTimestamp: 12, - FinishedTimestamp: 13, - Fingerprint: "", - Name: "this is task 2", - ParentTaskId: "task1", - State: model.RuntimeStateCanceled, - MLMDInputs: `{"a1":{"artifact_ids":[1,2,3]}}`, - MLMDOutputs: `{"b2":{"artifact_ids":[4,5,6]}}`, - StateHistory: []*model.RuntimeStatus{ - { - UpdateTimeInSec: 15, - State: model.RuntimeStateCanceled, - }, - }, - }, - }, }, ResourceReferences: nil, - Metrics: nil, Namespace: "", K8SName: "", }, @@ -4080,31 +3161,6 @@ func TestToModelRun(t *testing.T) { RunDetails: &apiv2beta1.RunDetails{ PipelineContextId: 10, PipelineRunContextId: 11, - TaskDetails: []*apiv2beta1.PipelineTaskDetail{ - { - RunId: "run2", - TaskId: "task1", - DisplayName: "this is task", - CreateTime: timestamppb.New(time.Unix(11, 0)), - StartTime: timestamppb.New(time.Unix(12, 0)), - EndTime: timestamppb.New(time.Unix(13, 0)), - ExecutorDetail: nil, - State: apiv2beta1.RuntimeState_RUNNING, - ExecutionId: 14, - Inputs: map[string]*apiv2beta1.ArtifactList{ - "a1": {ArtifactIds: []int64{1, 2, 3}}, - }, - Outputs: map[string]*apiv2beta1.ArtifactList{ - "b2": {ArtifactIds: []int64{4, 5, 6}}, - }, - StateHistory: []*apiv2beta1.RuntimeStatus{ - { - UpdateTime: ×tamppb.Timestamp{Seconds: 15}, - State: apiv2beta1.RuntimeState_RUNNING, - }, - }, - }, - }, }, RecurringRunId: "job1", StateHistory: []*apiv2beta1.RuntimeStatus{ @@ -4141,32 +3197,8 @@ func TestToModelRun(t *testing.T) { FinishedAtInSec: 3, PipelineContextId: 0, PipelineRunContextId: 0, - TaskDetails: []*model.Task{ - { - UUID: "task1", - Namespace: "", - PipelineName: "", - RunID: "run2", - MLMDExecutionID: "14", - CreatedTimestamp: 11, - StartedTimestamp: 12, - FinishedTimestamp: 13, - Fingerprint: "", - Name: "this is task", - State: model.RuntimeStateRunning, - MLMDInputs: `{"a1":{"artifact_ids":[1,2,3]}}`, - MLMDOutputs: `{"b2":{"artifact_ids":[4,5,6]}}`, - StateHistory: []*model.RuntimeStatus{ - { - UpdateTimeInSec: 15, - State: model.RuntimeStateRunning, - }, - }, - }, - }, }, ResourceReferences: nil, - Metrics: nil, Namespace: "", K8SName: "", }, @@ -4201,61 +3233,7 @@ func TestToModelRun(t *testing.T) { RunDetails: &apiv2beta1.RunDetails{ PipelineContextId: 10, PipelineRunContextId: 11, - TaskDetails: []*apiv2beta1.PipelineTaskDetail{ - { - RunId: "run1", - TaskId: "task1", - DisplayName: "this is task", - 
CreateTime: timestamppb.New(time.Unix(11, 0)), - StartTime: timestamppb.New(time.Unix(12, 0)), - EndTime: timestamppb.New(time.Unix(13, 0)), - ExecutorDetail: nil, - State: apiv2beta1.RuntimeState_FAILED, - ExecutionId: 14, - Inputs: map[string]*apiv2beta1.ArtifactList{ - "a1": {ArtifactIds: []int64{1, 2, 3}}, - }, - Outputs: map[string]*apiv2beta1.ArtifactList{ - "b2": {ArtifactIds: []int64{4, 5, 6}}, - }, - StateHistory: []*apiv2beta1.RuntimeStatus{ - { - UpdateTime: ×tamppb.Timestamp{Seconds: 15}, - State: apiv2beta1.RuntimeState_FAILED, - Error: util.ToRpcStatus(util.NewInvalidInputError("Input argument is invalid")), - }, - }, - ChildTasks: []*apiv2beta1.PipelineTaskDetail_ChildTask{ - { - ChildTask: &apiv2beta1.PipelineTaskDetail_ChildTask_PodName{PodName: "task2"}, - }, - }, - }, - { - RunId: "run1", - TaskId: "task2", - DisplayName: "this is task 2", - CreateTime: timestamppb.New(time.Unix(11, 0)), - StartTime: timestamppb.New(time.Unix(12, 0)), - EndTime: timestamppb.New(time.Unix(13, 0)), - ExecutorDetail: nil, - State: apiv2beta1.RuntimeState_CANCELED, - ExecutionId: 14, - Inputs: map[string]*apiv2beta1.ArtifactList{ - "a1": {ArtifactIds: []int64{1, 2, 3}}, - }, - Outputs: map[string]*apiv2beta1.ArtifactList{ - "b2": {ArtifactIds: []int64{4, 5, 6}}, - }, - ParentTaskId: "task1", - StateHistory: []*apiv2beta1.RuntimeStatus{ - { - UpdateTime: ×tamppb.Timestamp{Seconds: 15}, - State: apiv2beta1.RuntimeState_CANCELED, - }, - }, - }, - }, + TaskDetails: []*apiv2beta1.PipelineTaskDetail{}, }, RecurringRunId: "job1", StateHistory: []*apiv2beta1.RuntimeStatus{ @@ -4446,33 +3424,8 @@ func Test_toApiRun(t *testing.T) { FinishedAtInSec: 3, PipelineContextId: 10, PipelineRunContextId: 11, - TaskDetails: []*model.Task{ - { - UUID: "task1", - Namespace: "", - PipelineName: "", - RunID: "run2", - MLMDExecutionID: "14", - CreatedTimestamp: 11, - StartedTimestamp: 12, - FinishedTimestamp: 13, - Fingerprint: "", - Name: "this is task", - State: model.RuntimeStateFailed, - MLMDInputs: `{"a1":{"artifact_ids":[1,2,3]}}`, - MLMDOutputs: `{"b2":{"artifact_ids":[4,5,6]}}`, - StateHistory: []*model.RuntimeStatus{ - { - UpdateTimeInSec: 15, - State: model.RuntimeStateFailed, - Error: util.ToError(util.ToRpcStatus(util.NewInvalidInputError("Input argument is invalid"))), - }, - }, - }, - }, }, ResourceReferences: nil, - Metrics: nil, Namespace: "", K8SName: "", }, @@ -4519,32 +3472,6 @@ func Test_toApiRun(t *testing.T) { RunDetails: &apiv2beta1.RunDetails{ PipelineContextId: 10, PipelineRunContextId: 11, - TaskDetails: []*apiv2beta1.PipelineTaskDetail{ - { - RunId: "run2", - TaskId: "task1", - DisplayName: "this is task", - CreateTime: timestamppb.New(time.Unix(11, 0)), - StartTime: timestamppb.New(time.Unix(12, 0)), - EndTime: timestamppb.New(time.Unix(13, 0)), - ExecutorDetail: nil, - State: apiv2beta1.RuntimeState_FAILED, - ExecutionId: 14, - Inputs: map[string]*apiv2beta1.ArtifactList{ - "a1": {ArtifactIds: []int64{1, 2, 3}}, - }, - Outputs: map[string]*apiv2beta1.ArtifactList{ - "b2": {ArtifactIds: []int64{4, 5, 6}}, - }, - StateHistory: []*apiv2beta1.RuntimeStatus{ - { - UpdateTime: ×tamppb.Timestamp{Seconds: 15}, - State: apiv2beta1.RuntimeState_FAILED, - Error: util.ToRpcStatus(util.NewInvalidInputError("Input argument is invalid")), - }, - }, - }, - }, }, RecurringRunId: "job1", StateHistory: []*apiv2beta1.RuntimeStatus{ @@ -4587,33 +3514,9 @@ func Test_toApiRun(t *testing.T) { FinishedAtInSec: 3, PipelineContextId: 10, PipelineRunContextId: 11, - TaskDetails: []*model.Task{ - { - UUID: "task1", 
- Namespace: "", - PipelineName: "", - RunID: "run2", - MLMDExecutionID: "14", - CreatedTimestamp: 11, - StartedTimestamp: 12, - FinishedTimestamp: 13, - Fingerprint: "", - Name: "this is task", - State: model.RuntimeStateCancelling, - MLMDInputs: `{"a1":{"artifact_ids":[1,2,3]}}`, - MLMDOutputs: `{"b2":{"artifact_ids":[4,5,6]}}`, - StateHistory: []*model.RuntimeStatus{ - { - UpdateTimeInSec: 15, - State: model.RuntimeStateCancelling, - }, - }, - ChildrenPods: []string{"task3", "task4"}, - }, - }, + TaskDetails: []*model.Task{}, }, ResourceReferences: nil, - Metrics: nil, Namespace: "", K8SName: "", }, @@ -4649,30 +3552,7 @@ func Test_toApiRun(t *testing.T) { FinishedAtInSec: 3, PipelineContextId: 10, PipelineRunContextId: 11, - TaskDetails: []*model.Task{ - { - UUID: "task1", - Namespace: "", - PipelineName: "", - RunID: "run2", - MLMDExecutionID: "14", - CreatedTimestamp: 11, - StartedTimestamp: 12, - FinishedTimestamp: 13, - Fingerprint: "", - Name: "this is task", - State: model.RuntimeStatePaused, - MLMDInputs: `{"a1":{"artifact_ids":[1,2,3]}}`, - MLMDOutputs: `{"b2":{"artifact_ids":[4,5,6]}}`, - StateHistory: []*model.RuntimeStatus{ - { - UpdateTimeInSec: 15, - State: model.RuntimeStatePaused, - }, - }, - ChildrenPods: []string{"task3", "task4"}, - }, - }, + TaskDetails: []*model.Task{}, }, ResourceReferences: nil, Metrics: nil, diff --git a/backend/src/apiserver/server/api_util.go b/backend/src/apiserver/server/api_util.go index 7f9d7745413..294e764415f 100644 --- a/backend/src/apiserver/server/api_util.go +++ b/backend/src/apiserver/server/api_util.go @@ -434,7 +434,7 @@ func apiParametersToStringV1(params []*apiv1beta1.Parameter) string { } // Validates a run metric fields from request. -func validateRunMetric(metric *model.RunMetric) error { +func validateRunMetricV1(metric *model.RunMetricV1) error { matched, err := regexp.MatchString(metricNamePattern, metric.Name) if err != nil { // This should never happen. 
diff --git a/backend/src/apiserver/server/api_util_test.go b/backend/src/apiserver/server/api_util_test.go index 29982a5ee42..ba6ff8e3568 100644 --- a/backend/src/apiserver/server/api_util_test.go +++ b/backend/src/apiserver/server/api_util_test.go @@ -328,27 +328,27 @@ func TestGetExperimentIDFromResourceReferences(t *testing.T) { } func TestValidateRunMetric_Pass(t *testing.T) { - metric := &model.RunMetric{ + metric := &model.RunMetricV1{ Name: "foo", NodeID: "node-1", } - err := validateRunMetric(metric) + err := validateRunMetricV1(metric) assert.Nil(t, err) } func TestValidateRunMetric_InvalidNames(t *testing.T) { - metric := &model.RunMetric{ + metric := &model.RunMetricV1{ NodeID: "node-1", } // Empty name - err := validateRunMetric(metric) + err := validateRunMetricV1(metric) AssertUserError(t, err, codes.InvalidArgument) // Unallowed character metric.Name = "$" - err = validateRunMetric(metric) + err = validateRunMetricV1(metric) AssertUserError(t, err, codes.InvalidArgument) // Name is too long @@ -357,22 +357,22 @@ func TestValidateRunMetric_InvalidNames(t *testing.T) { bytes[i] = 'a' } metric.Name = string(bytes) - err = validateRunMetric(metric) + err = validateRunMetricV1(metric) AssertUserError(t, err, codes.InvalidArgument) } -func TestValidateRunMetric_InvalidNodeIDs(t *testing.T) { - metric := &model.RunMetric{ +func TestValidateRunMetricV1_InvalidNodeIDs(t *testing.T) { + metric := &model.RunMetricV1{ Name: "a", } // Empty node ID - err := validateRunMetric(metric) + err := validateRunMetricV1(metric) AssertUserError(t, err, codes.InvalidArgument) // Node ID is too long metric.NodeID = string(make([]byte, 129)) - err = validateRunMetric(metric) + err = validateRunMetricV1(metric) AssertUserError(t, err, codes.InvalidArgument) } @@ -392,7 +392,8 @@ func TestPipelineSpecStructToYamlString_DirectSpec(t *testing.T) { splitTemplate := strings.Split(template, "\n---\n") pipelineSpecJson, _ := yaml.YAMLToJSON([]byte(splitTemplate[0])) - protojson.Unmarshal(pipelineSpecJson, &pipeline) + err := protojson.Unmarshal(pipelineSpecJson, &pipeline) + assert.Nil(t, err) actualTemplate, err := pipelineSpecStructToYamlString(&pipeline) assert.Nil(t, err) @@ -416,10 +417,13 @@ func TestPipelineSpecStructToYamlString_WithPlatform(t *testing.T) { splitTemplate := strings.Split(template, "\n---\n") pipelineSpecJson, _ := yaml.YAMLToJSON([]byte(splitTemplate[0])) - protojson.Unmarshal(pipelineSpecJson, &pipelineSpec) + + err := protojson.Unmarshal(pipelineSpecJson, &pipelineSpec) + assert.Nil(t, err) platformSpecJson, _ := yaml.YAMLToJSON([]byte(splitTemplate[1])) - protojson.Unmarshal(platformSpecJson, &platformSpec) + err = protojson.Unmarshal(platformSpecJson, &platformSpec) + assert.Nil(t, err) pipelineSpecValue := structpb.NewStructValue(&pipelineSpec) platformSpecValue := structpb.NewStructValue(&platformSpec) @@ -453,7 +457,9 @@ func TestPipelineSpecStructToYamlString_NestedPipelineSpec(t *testing.T) { splitTemplate := strings.Split(template, "\n---\n") pipelineSpecJson, _ := yaml.YAMLToJSON([]byte(splitTemplate[0])) - protojson.Unmarshal(pipelineSpecJson, &pipelineSpec) + err := protojson.Unmarshal(pipelineSpecJson, &pipelineSpec) + assert.Nil(t, err) + pipelineSpecValue := structpb.NewStructValue(&pipelineSpec) pipeline := structpb.Struct{ diff --git a/backend/src/apiserver/server/artifact_server.go b/backend/src/apiserver/server/artifact_server.go new file mode 100644 index 00000000000..e773b46a72a --- /dev/null +++ b/backend/src/apiserver/server/artifact_server.go @@ -0,0 +1,590 
@@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package server + +import ( + "context" + + apiv2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" + "github.com/kubeflow/pipelines/backend/src/apiserver/common" + "github.com/kubeflow/pipelines/backend/src/apiserver/model" + "github.com/kubeflow/pipelines/backend/src/apiserver/resource" + "github.com/kubeflow/pipelines/backend/src/common/util" + authorizationv1 "k8s.io/api/authorization/v1" +) + +type ArtifactServer struct { + resourceManager *resource.ResourceManager + apiv2beta1.UnimplementedArtifactServiceServer +} + +// NewArtifactServer creates a new ArtifactServer. +func NewArtifactServer(resourceManager *resource.ResourceManager) *ArtifactServer { + return &ArtifactServer{resourceManager: resourceManager} +} + +// CreateArtifact creates a new artifact. +func (s *ArtifactServer) CreateArtifact(ctx context.Context, request *apiv2beta1.CreateArtifactRequest) (*apiv2beta1.Artifact, error) { + err := s.validateCreateArtifactRequest(request) + if err != nil { + return nil, util.Wrap(err, "Failed to create artifact due to validation error") + } + + // Extract namespace for authorization + namespace := s.resourceManager.ReplaceNamespace(request.GetArtifact().GetNamespace()) + + // Check authorization - artifacts are accessible if user can access runs in the namespace + resourceAttributes := &authorizationv1.ResourceAttributes{ + Namespace: namespace, + Verb: common.RbacResourceVerbCreate, + } + if err = s.canAccessArtifacts(ctx, "", resourceAttributes); err != nil { + return nil, util.Wrap(err, "Failed to authorize the request") + } + + task, err := s.resourceManager.GetTask(request.GetTaskId()) + if err != nil { + return nil, util.Wrap(err, "Failed to get task") + } + if task.RunUUID != request.GetRunId() { + return nil, util.NewInvalidInputError("Task ID does not belong to this Run ID") + } + + modelArtifact, err := toModelArtifact(request.GetArtifact()) + if err != nil { + return nil, util.Wrap(err, "Failed to create artifact due to conversion error") + } + + // Set the validated namespace + modelArtifact.Namespace = namespace + + artifact, err := s.resourceManager.CreateArtifact(modelArtifact) + if err != nil { + return nil, util.Wrap(err, "Failed to create artifact") + } + + // Build the IOProducer with task name + producer := &apiv2beta1.IOProducer{ + TaskName: task.Name, + } + // Add iteration index if provided + if request.IterationIndex != nil { + producer.Iteration = request.IterationIndex + } + + artifactTask := &apiv2beta1.ArtifactTask{ + ArtifactId: artifact.UUID, + TaskId: task.UUID, + RunId: request.GetRunId(), + // An artifact at creation is an output of the associated task. 
+ Type: apiv2beta1.IOType_OUTPUT, + Producer: producer, + Key: request.GetProducerKey(), + } + + modelAT, err := toModelArtifactTask(artifactTask) + if err != nil { + return nil, util.Wrap(err, "Failed to convert artifact_task") + } + + _, err = s.resourceManager.CreateArtifactTask(modelAT) + if err != nil { + return nil, util.Wrap(err, "Failed to create artifact-task") + } + + return toAPIArtifact(artifact) +} + +// CreateArtifactsBulk creates multiple artifacts in bulk. +func (s *ArtifactServer) CreateArtifactsBulk(ctx context.Context, request *apiv2beta1.CreateArtifactsBulkRequest) (*apiv2beta1.CreateArtifactsBulkResponse, error) { + if request == nil || len(request.GetArtifacts()) == 0 { + return nil, util.NewInvalidInputError("CreateArtifactsBulkRequest must contain at least one artifact") + } + + response := &apiv2beta1.CreateArtifactsBulkResponse{ + Artifacts: make([]*apiv2beta1.Artifact, 0, len(request.GetArtifacts())), + } + + // Validate and create each artifact + for i, artifactReq := range request.GetArtifacts() { + err := s.validateCreateArtifactRequest(artifactReq) + if err != nil { + return nil, util.Wrapf(err, "Failed to create artifact %d due to validation error", i) + } + + // Extract namespace for authorization + namespace := s.resourceManager.ReplaceNamespace(artifactReq.GetArtifact().GetNamespace()) + + // Check authorization - artifacts are accessible if user can access runs in the namespace + resourceAttributes := &authorizationv1.ResourceAttributes{ + Namespace: namespace, + Verb: common.RbacResourceVerbCreate, + } + if err = s.canAccessArtifacts(ctx, "", resourceAttributes); err != nil { + return nil, util.Wrapf(err, "Failed to authorize artifact %d creation", i) + } + + task, err := s.resourceManager.GetTask(artifactReq.GetTaskId()) + if err != nil { + return nil, util.Wrapf(err, "Failed to get task for artifact %d", i) + } + if task.RunUUID != artifactReq.GetRunId() { + return nil, util.NewInvalidInputError("Task ID does not belong to this Run ID for artifact %d", i) + } + + modelArtifact, err := toModelArtifact(artifactReq.GetArtifact()) + if err != nil { + return nil, util.Wrapf(err, "Failed to create artifact %d due to conversion error", i) + } + + // Set the validated namespace + modelArtifact.Namespace = namespace + + artifact, err := s.resourceManager.CreateArtifact(modelArtifact) + if err != nil { + return nil, util.Wrapf(err, "Failed to create artifact %d", i) + } + + // Build the IOProducer with task name + producer := &apiv2beta1.IOProducer{ + TaskName: task.Name, + } + // Add iteration index if provided + if artifactReq.IterationIndex != nil { + producer.Iteration = artifactReq.IterationIndex + } + + artifactTask := &apiv2beta1.ArtifactTask{ + ArtifactId: artifact.UUID, + TaskId: task.UUID, + RunId: artifactReq.GetRunId(), + // An artifact at creation is an output of the associated task. 
+ Type: apiv2beta1.IOType_OUTPUT, + Producer: producer, + Key: artifactReq.GetProducerKey(), + } + + modelAT, err := toModelArtifactTask(artifactTask) + if err != nil { + return nil, util.Wrapf(err, "Failed to convert artifact_task for artifact %d", i) + } + + _, err = s.resourceManager.CreateArtifactTask(modelAT) + if err != nil { + return nil, util.Wrapf(err, "Failed to create artifact-task for artifact %d", i) + } + + apiArtifact, err := toAPIArtifact(artifact) + if err != nil { + return nil, util.Wrapf(err, "Failed to convert artifact %d to API", i) + } + response.Artifacts = append(response.Artifacts, apiArtifact) + } + + return response, nil +} + +// GetArtifact finds a specific artifact by ID. +func (s *ArtifactServer) GetArtifact(ctx context.Context, request *apiv2beta1.GetArtifactRequest) (*apiv2beta1.Artifact, error) { + artifactID := request.GetArtifactId() + if artifactID == "" { + return nil, util.NewInvalidInputError("Artifact ID is required") + } + + artifact, err := s.resourceManager.GetArtifact(artifactID) + if err != nil { + return nil, util.Wrap(err, "Failed to get artifact") + } + + // Check authorization using the artifact's namespace + resourceAttributes := &authorizationv1.ResourceAttributes{ + Namespace: artifact.Namespace, + Verb: common.RbacResourceVerbGet, + } + if err = s.canAccessArtifacts(ctx, artifactID, resourceAttributes); err != nil { + return nil, util.Wrap(err, "Failed to authorize the request") + } + + return toAPIArtifact(artifact) +} + +// ListArtifacts finds all artifacts within the specified namespace. +func (s *ArtifactServer) ListArtifacts(ctx context.Context, request *apiv2beta1.ListArtifactRequest) (*apiv2beta1.ListArtifactResponse, error) { + opts, err := validatedListOptions(&model.Artifact{}, request.PageToken, int(request.PageSize), request.SortBy, request.Filter, "v2beta1") + if err != nil { + return nil, util.Wrap(err, "Failed to create list options") + } + + // Handle namespace and authorization + namespace := s.resourceManager.ReplaceNamespace(request.GetNamespace()) + + // Check authorization + resourceAttributes := &authorizationv1.ResourceAttributes{ + Namespace: namespace, + Verb: common.RbacResourceVerbList, + } + if err = s.canAccessArtifacts(ctx, "", resourceAttributes); err != nil { + return nil, util.Wrap(err, "Failed to authorize the request") + } + + filterContext, err := validateFilterV2Beta1Artifact(namespace) + if err != nil { + return nil, util.Wrap(err, "Validating filter failed") + } + + artifacts, totalSize, nextPageToken, err := s.resourceManager.ListArtifacts([]*model.FilterContext{filterContext}, opts) + if err != nil { + return nil, util.Wrap(err, "List artifacts failed") + } + + return &apiv2beta1.ListArtifactResponse{ + Artifacts: toAPIArtifacts(artifacts), + TotalSize: int32(totalSize), + NextPageToken: nextPageToken, + }, nil +} + +// CreateArtifactTask creates an artifact-task relationship. 
+func (s *ArtifactServer) CreateArtifactTask(ctx context.Context, request *apiv2beta1.CreateArtifactTaskRequest) (*apiv2beta1.ArtifactTask, error) { + if request == nil || request.GetArtifactTask() == nil { + return nil, util.NewInvalidInputError("CreateArtifactTaskRequest and artifact_task are required") + } + at := request.GetArtifactTask() + if at.GetArtifactId() == "" { + return nil, util.NewInvalidInputError("artifact_task.artifact_id is required") + } + if at.GetTaskId() == "" { + return nil, util.NewInvalidInputError("artifact_task.task_id is required") + } + if at.GetRunId() == "" { + return nil, util.NewInvalidInputError("artifact_task.run_id is required") + } + if at.GetType() == apiv2beta1.IOType_UNSPECIFIED { + return nil, util.NewInvalidInputError("artifact_task.type is required") + } + if at.GetProducer() == nil { + return nil, util.NewInvalidInputError("artifact_task.producer is required") + } + if at.GetKey() == "" { + return nil, util.NewInvalidInputError("artifact_task.key is required") + } + + // Fetch task and artifact for validation and authorization + task, err := s.resourceManager.GetTask(at.GetTaskId()) + if err != nil { + return nil, util.Wrap(err, "Failed to fetch task for CreateArtifactTask") + } + artifact, err := s.resourceManager.GetArtifact(at.GetArtifactId()) + if err != nil { + return nil, util.Wrap(err, "Failed to fetch artifact for CreateArtifactTask") + } + + // Optional: enforce same-namespace linkage + if common.IsMultiUserMode() && task.Namespace != "" && artifact.Namespace != "" && task.Namespace != artifact.Namespace { + return nil, util.NewInvalidInputError("artifact and task must be in the same namespace: artifact=%s task=%s", artifact.Namespace, task.Namespace) + } + + // Authorize create in the task's namespace + resourceAttributes := &authorizationv1.ResourceAttributes{ + Namespace: task.Namespace, + Verb: common.RbacResourceVerbCreate, + } + if err = s.canAccessArtifacts(ctx, "", resourceAttributes); err != nil { + return nil, util.Wrap(err, "Failed to authorize the request") + } + + modelAT, err := toModelArtifactTask(at) + if err != nil { + return nil, util.Wrap(err, "Failed to convert artifact_task") + } + + created, err := s.resourceManager.CreateArtifactTask(modelAT) + if err != nil { + return nil, util.Wrap(err, "Failed to create artifact-task") + } + return toAPIArtifactTask(created), nil +} + +// ListArtifactTasks lists artifact-task relationships. 
+func (s *ArtifactServer) ListArtifactTasks(ctx context.Context, request *apiv2beta1.ListArtifactTasksRequest) (*apiv2beta1.ListArtifactTasksResponse, error) { + opts, err := validatedListOptions(&model.ArtifactTask{}, request.PageToken, int(request.PageSize), request.SortBy, request.Filter, "v2beta1") + if err != nil { + return nil, util.Wrap(err, "Failed to create list options") + } + + // Authorization check - we need to verify access to the runs/namespaces involved + // For now, require at least one filter to determine namespace context + if len(request.TaskIds) == 0 && len(request.RunIds) == 0 && len(request.ArtifactIds) == 0 { + return nil, util.NewInvalidInputError("At least one filter (task_ids, run_ids, or artifact_ids) is required") + } + + // Check authorization based on provided filters + err = s.authorizeArtifactTaskAccess(ctx, request.TaskIds, request.RunIds, request.ArtifactIds) + if err != nil { + return nil, util.Wrap(err, "Failed to authorize the request") + } + + filterContexts, err := validateFilterV2Beta1ArtifactTask(request.TaskIds, request.RunIds, request.ArtifactIds) + if err != nil { + return nil, util.Wrap(err, "Validating filter failed") + } + + // Convert IOType from proto to model if provided + var ioType *model.IOType + if request.Type != apiv2beta1.IOType_UNSPECIFIED { + modelIOType := model.IOType(request.Type) + ioType = &modelIOType + } + + artifactTasks, totalSize, nextPageToken, err := s.resourceManager.ListArtifactTasks(filterContexts, ioType, opts) + if err != nil { + return nil, util.Wrap(err, "List artifact tasks failed") + } + + return &apiv2beta1.ListArtifactTasksResponse{ + ArtifactTasks: toAPIArtifactTasks(artifactTasks), + TotalSize: int32(totalSize), + NextPageToken: nextPageToken, + }, nil +} + +// CreateArtifactTasksBulk creates multiple artifact-task relationships in bulk. 
+func (s *ArtifactServer) CreateArtifactTasksBulk(ctx context.Context, request *apiv2beta1.CreateArtifactTasksBulkRequest) (*apiv2beta1.CreateArtifactTasksBulkResponse, error) { + if request == nil || len(request.GetArtifactTasks()) == 0 { + return nil, util.NewInvalidInputError("CreateArtifactTasksBulkRequest must contain at least one artifact task") + } + + // Validate all artifact tasks and check authorization + modelArtifactTasks := make([]*model.ArtifactTask, 0, len(request.GetArtifactTasks())) + for _, apiAT := range request.GetArtifactTasks() { + if apiAT.GetArtifactId() == "" { + return nil, util.NewInvalidInputError("artifact_task.artifact_id is required") + } + if apiAT.GetTaskId() == "" { + return nil, util.NewInvalidInputError("artifact_task.task_id is required") + } + if apiAT.GetRunId() == "" { + return nil, util.NewInvalidInputError("artifact_task.run_id is required") + } + + // Fetch task and artifact for validation and authorization + task, err := s.resourceManager.GetTask(apiAT.GetTaskId()) + if err != nil { + return nil, util.Wrap(err, "Failed to fetch task for CreateArtifactTasksBulk") + } + artifact, err := s.resourceManager.GetArtifact(apiAT.GetArtifactId()) + if err != nil { + return nil, util.Wrap(err, "Failed to fetch artifact for CreateArtifactTasksBulk") + } + + // Optional: enforce same-namespace linkage + if common.IsMultiUserMode() && task.Namespace != "" && artifact.Namespace != "" && task.Namespace != artifact.Namespace { + return nil, util.NewInvalidInputError("artifact and task must be in the same namespace: artifact=%s task=%s", artifact.Namespace, task.Namespace) + } + + // Authorize create in the task's namespace + resourceAttributes := &authorizationv1.ResourceAttributes{ + Namespace: task.Namespace, + Verb: common.RbacResourceVerbCreate, + } + if err = s.canAccessArtifacts(ctx, "", resourceAttributes); err != nil { + return nil, util.Wrap(err, "Failed to authorize the request") + } + + modelAT, err := toModelArtifactTask(apiAT) + if err != nil { + return nil, util.Wrap(err, "Failed to convert artifact_task") + } + modelArtifactTasks = append(modelArtifactTasks, modelAT) + } + + // Create all artifact tasks in bulk + createdArtifactTasks, err := s.resourceManager.CreateArtifactTasks(modelArtifactTasks) + if err != nil { + return nil, util.Wrap(err, "Failed to create artifact-tasks in bulk") + } + + return &apiv2beta1.CreateArtifactTasksBulkResponse{ + ArtifactTasks: toAPIArtifactTasks(createdArtifactTasks), + }, nil +} + +// Authorization helper functions + +// canAccessRun checks if the user can access runs in the given namespace +// Following the same pattern as BaseRunServer.canAccessRun +func (s *ArtifactServer) canAccessRun(ctx context.Context, runID string, resourceAttributes *authorizationv1.ResourceAttributes) error { + if !common.IsMultiUserMode() { + // Skip authz if not multi-user mode. 
+ return nil + } + + if runID != "" { + run, err := s.resourceManager.GetRun(runID) + if err != nil { + return util.Wrapf(err, "Failed to authorize with the run ID %v", runID) + } + if s.resourceManager.IsEmptyNamespace(run.Namespace) { + experiment, err := s.resourceManager.GetExperiment(run.ExperimentId) + if err != nil { + return util.NewInvalidInputError("run %v has an empty namespace and the parent experiment %v could not be fetched: %s", runID, run.ExperimentId, err.Error()) + } + resourceAttributes.Namespace = experiment.Namespace + } else { + resourceAttributes.Namespace = run.Namespace + } + if resourceAttributes.Name == "" { + resourceAttributes.Name = run.K8SName + } + } + + if s.resourceManager.IsEmptyNamespace(resourceAttributes.Namespace) { + return util.NewInvalidInputError("A resource cannot have an empty namespace in multi-user mode") + } + + resourceAttributes.Group = common.RbacPipelinesGroup + resourceAttributes.Version = common.RbacPipelinesVersion + resourceAttributes.Resource = common.RbacResourceTypeRuns + err := s.resourceManager.IsAuthorized(ctx, resourceAttributes) + if err != nil { + return util.Wrapf(err, "Failed to access resource. Check if you have access to namespace %s", resourceAttributes.Namespace) + } + return nil +} + +func (s *ArtifactServer) canAccessArtifacts(ctx context.Context, artifactID string, resourceAttributes *authorizationv1.ResourceAttributes) error { + if !common.IsMultiUserMode() { + // Skip authz if not multi-user mode. + return nil + } + + if artifactID != "" { + artifact, err := s.resourceManager.GetArtifact(artifactID) + if err != nil { + return util.Wrapf(err, "Failed to authorize with the artifact ID %v", artifactID) + } + if s.resourceManager.IsEmptyNamespace(artifact.Namespace) { + return util.NewInvalidInputError("artifact %v has an empty namespace", artifactID) + } + resourceAttributes.Namespace = artifact.Namespace + } + + if s.resourceManager.IsEmptyNamespace(resourceAttributes.Namespace) { + return util.NewInvalidInputError("A resource cannot have an empty namespace in multi-user mode") + } + + resourceAttributes.Group = common.RbacPipelinesGroup + resourceAttributes.Version = common.RbacPipelinesVersion + resourceAttributes.Resource = common.RbacResourceTypeArtifacts + err := s.resourceManager.IsAuthorized(ctx, resourceAttributes) + if err != nil { + return util.Wrapf(err, "Failed to access resource. 
Check if you have access to namespace %s", resourceAttributes.Namespace) + } + return nil +} + +// authorizeArtifactTaskAccess authorizes access to artifact-task relationships +// TODO(HumairAK): Make this more efficient by doing bulk calls to the database, +// and aggregating namespaces down to unique namespace calls +func (s *ArtifactServer) authorizeArtifactTaskAccess(ctx context.Context, taskIDs, runIDs, artifactIDs []string) error { + // Check authorization for run IDs (direct access) + for _, runID := range runIDs { + resourceAttributes := &authorizationv1.ResourceAttributes{ + Verb: common.RbacResourceVerbGet, + } + if err := s.canAccessRun(ctx, runID, resourceAttributes); err != nil { + return err + } + } + + // Check authorization for task IDs (get namespace from task) + for _, taskID := range taskIDs { + task, err := s.resourceManager.GetTask(taskID) + if err != nil { + return util.Wrap(err, "Failed to get task for authorization") + } + resourceAttributes := &authorizationv1.ResourceAttributes{ + Namespace: task.Namespace, + Verb: common.RbacResourceVerbGet, + } + if err = s.canAccessRun(ctx, "", resourceAttributes); err != nil { + return err + } + } + + // Check authorization for artifact IDs (get namespace from artifact) + for _, artifactID := range artifactIDs { + artifact, err := s.resourceManager.GetArtifact(artifactID) + if err != nil { + return util.Wrap(err, "Failed to get artifact for authorization") + } + resourceAttributes := &authorizationv1.ResourceAttributes{ + Namespace: artifact.Namespace, + Verb: common.RbacResourceVerbGet, + } + if err = s.canAccessRun(ctx, "", resourceAttributes); err != nil { + return err + } + } + return nil +} + +func (s *ArtifactServer) validateCreateArtifactRequest(request *apiv2beta1.CreateArtifactRequest) error { + if request == nil { + return util.NewInvalidInputError("CreateArtifactRequest is nil") + } + artifact := request.GetArtifact() + if artifact == nil { + return util.NewInvalidInputError("Artifact is required") + } + if artifact.GetArtifactId() != "" { + return util.NewInvalidInputError("Artifact ID should not be set on create") + } + if artifact.GetNamespace() == "" { + return util.NewInvalidInputError("Artifact namespace is required") + } + if request.GetArtifact().GetType() == apiv2beta1.Artifact_TYPE_UNSPECIFIED { + return util.NewInvalidInputError("Artifact type is required") + } + if request.GetArtifact().GetName() == "" { + return util.NewInvalidInputError("Artifact name is required") + } + if request.GetRunId() == "" { + return util.NewInvalidInputError("Run ID is required") + } + if request.GetTaskId() == "" { + return util.NewInvalidInputError("Task ID is required") + } + if request.GetProducerKey() == "" { + return util.NewInvalidInputError("Producer key is required") + } + // Metrics validation + if request.GetArtifact().GetType() == apiv2beta1.Artifact_Metric && + request.GetArtifact().NumberValue == nil { + return util.NewInvalidInputError("number_value is required for a Metric artifact") + } + if (request.GetArtifact().GetType() == apiv2beta1.Artifact_ClassificationMetric || + request.GetArtifact().GetType() == apiv2beta1.Artifact_SlicedClassificationMetric) && + request.GetArtifact().GetMetadata() == nil { + return util.NewInvalidInputError("No metric or metadata was found for %s artifact", request.GetArtifact().GetType()) + } + // request.GetType() is the IO type of the artifact-task link, distinct from the artifact type checked above. + if request.GetType() == apiv2beta1.IOType_UNSPECIFIED { + return util.NewInvalidInputError("Type is required") + } + return nil +}
diff --git a/backend/src/apiserver/server/artifact_server_test.go b/backend/src/apiserver/server/artifact_server_test.go new file mode 100644 index 00000000000..82aee8d92a7 --- /dev/null +++ b/backend/src/apiserver/server/artifact_server_test.go @@ -0,0 +1,1038 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package server + +import ( + "context" + "testing" + + apiv2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" + "github.com/kubeflow/pipelines/backend/src/apiserver/common" + "github.com/kubeflow/pipelines/backend/src/apiserver/model" + "github.com/kubeflow/pipelines/backend/src/apiserver/resource" + "github.com/kubeflow/pipelines/backend/src/common/util" + "github.com/spf13/viper" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/metadata" +) + +const ( + runid1 = "123e4567-e89b-12d3-a456-426655440001" +) + +func createArtifactServer(resourceManager *resource.ResourceManager) *ArtifactServer { + return &ArtifactServer{resourceManager: resourceManager} +} + +// ctxWithUser returns a context with a fake user identity header so that +// authorization in multi-user mode passes in tests. 
+func ctxWithUser() context.Context { + header := common.GetKubeflowUserIDHeader() + prefix := common.GetKubeflowUserIDPrefix() + // Typical header value is like: "accounts.google.com:alice@example.com" + val := prefix + "test-user@example.com" + md := metadata.New(map[string]string{header: val}) + return metadata.NewIncomingContext(context.Background(), md) +} + +func strPTR(s string) *string { + return &s +} + +func TestArtifactServer_CreateArtifact_MultiUserCreateAndGet_Succeeds(t *testing.T) { + viper.Set(common.MultiUserMode, "true") + defer viper.Set(common.MultiUserMode, "false") + clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) + resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) + s := createArtifactServer(resourceManager) + + // Create run + _, err := clientManager.RunStore().CreateRun(&model.Run{ + UUID: runid1, + K8SName: "test-run", + DisplayName: "test-run", + StorageState: model.StorageStateAvailable, + Namespace: "ns1", + RunDetails: model.RunDetails{ + CreatedAtInSec: 1, + ScheduledAtInSec: 1, + State: model.RuntimeStateRunning, + }, + }) + assert.NoError(t, err) + + // Create task for the run + task, err := clientManager.TaskStore().CreateTask(&model.Task{ + Namespace: "ns1", + RunUUID: runid1, + Name: "test-task", + State: 1, + }) + assert.NoError(t, err) + + req := &apiv2beta1.CreateArtifactRequest{ + RunId: runid1, + TaskId: task.UUID, + ProducerKey: "producer-key", + Type: apiv2beta1.IOType_RUNTIME_VALUE_INPUT, + Artifact: &apiv2beta1.Artifact{ + Namespace: "ns1", + Type: apiv2beta1.Artifact_Model, + Uri: strPTR("gs://b/f"), + Name: "a1", + Description: "desc1", + }} + created, err := s.CreateArtifact(ctxWithUser(), req) + assert.NoError(t, err) + assert.NotEmpty(t, created.GetArtifactId()) + assert.Equal(t, "ns1", created.GetNamespace()) + assert.Equal(t, apiv2beta1.Artifact_Model, created.GetType()) + assert.Equal(t, "gs://b/f", created.GetUri()) + assert.Equal(t, "a1", created.GetName()) + assert.Equal(t, "desc1", created.GetDescription()) + + // Creating an artifact should create an artifact task + // Fetch the artifact task + artifactTasks, err := s.ListArtifactTasks(ctxWithUser(), &apiv2beta1.ListArtifactTasksRequest{ + TaskIds: []string{task.UUID}, + PageSize: 10, + }) + assert.NoError(t, err) + assert.Equal(t, int32(1), artifactTasks.GetTotalSize()) + assert.Equal(t, 1, len(artifactTasks.GetArtifactTasks())) + + at := artifactTasks.GetArtifactTasks()[0] + assert.Equal(t, created.GetArtifactId(), at.GetArtifactId()) + assert.Equal(t, task.UUID, at.GetTaskId()) + assert.Equal(t, apiv2beta1.IOType_OUTPUT, at.GetType()) + assert.NotNil(t, at.GetProducer()) + assert.Equal(t, task.Name, at.GetProducer().GetTaskName()) + assert.Equal(t, "producer-key", at.GetKey()) + +} + +func TestArtifactServer_CreateArtifact_WithIterationIndex(t *testing.T) { + viper.Set(common.MultiUserMode, "true") + defer viper.Set(common.MultiUserMode, "false") + clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) + resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) + s := createArtifactServer(resourceManager) + + // Create run + _, err := clientManager.RunStore().CreateRun(&model.Run{ + UUID: runid1, + K8SName: "iteration-run", + DisplayName: "iteration-run", + StorageState: model.StorageStateAvailable, + Namespace: "ns1", + RunDetails: model.RunDetails{ + CreatedAtInSec: 1, + ScheduledAtInSec: 
1, + State: model.RuntimeStateRunning, + }, + }) + assert.NoError(t, err) + + // Create task for the run + task, err := clientManager.TaskStore().CreateTask(&model.Task{ + Namespace: "ns1", + RunUUID: runid1, + Name: "iteration-task", + State: 1, + }) + assert.NoError(t, err) + + // Create artifact with iteration_index + iterationIndex := int64(5) + req := &apiv2beta1.CreateArtifactRequest{ + RunId: runid1, + TaskId: task.UUID, + ProducerKey: "output-artifact", + IterationIndex: &iterationIndex, + Type: apiv2beta1.IOType_RUNTIME_VALUE_INPUT, + Artifact: &apiv2beta1.Artifact{ + Namespace: "ns1", + Type: apiv2beta1.Artifact_Dataset, + Uri: strPTR("gs://bucket/iteration-5/data"), + Name: "iteration-dataset", + Description: "Dataset from iteration 5", + }} + created, err := s.CreateArtifact(ctxWithUser(), req) + assert.NoError(t, err) + assert.NotEmpty(t, created.GetArtifactId()) + assert.Equal(t, "ns1", created.GetNamespace()) + assert.Equal(t, apiv2beta1.Artifact_Dataset, created.GetType()) + assert.Equal(t, "iteration-dataset", created.GetName()) + + // Verify the artifact task was created with iteration in producer + artifactTasks, err := s.ListArtifactTasks(ctxWithUser(), &apiv2beta1.ListArtifactTasksRequest{ + TaskIds: []string{task.UUID}, + PageSize: 10, + }) + assert.NoError(t, err) + assert.Equal(t, int32(1), artifactTasks.GetTotalSize()) + assert.Equal(t, 1, len(artifactTasks.GetArtifactTasks())) + + at := artifactTasks.GetArtifactTasks()[0] + assert.Equal(t, created.GetArtifactId(), at.GetArtifactId()) + assert.Equal(t, task.UUID, at.GetTaskId()) + assert.Equal(t, apiv2beta1.IOType_OUTPUT, at.GetType()) + assert.Equal(t, "output-artifact", at.GetKey()) + assert.NotNil(t, at.GetProducer()) + assert.Equal(t, task.Name, at.GetProducer().GetTaskName()) + // Verify iteration was set + assert.NotNil(t, at.GetProducer().Iteration) + assert.Equal(t, int64(5), *at.GetProducer().Iteration) +} + +func TestArtifactServer_ListArtifacts_HappyPath(t *testing.T) { + viper.Set(common.MultiUserMode, "true") + defer viper.Set(common.MultiUserMode, "false") + clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) + resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) + s := createArtifactServer(resourceManager) + + // Create required run and task for artifact creation + _, err := clientManager.RunStore().CreateRun(&model.Run{ + UUID: runid1, + K8SName: "list-run", + DisplayName: "list-run", + StorageState: model.StorageStateAvailable, + Namespace: "ns1", + RunDetails: model.RunDetails{CreatedAtInSec: 1, ScheduledAtInSec: 1, State: model.RuntimeStateRunning}, + }) + assert.NoError(t, err) + listTask, err := clientManager.TaskStore().CreateTask(&model.Task{ + Namespace: "ns1", + RunUUID: runid1, + Name: "t-list", + State: 1, + }) + assert.NoError(t, err) + _, err = s.CreateArtifact(ctxWithUser(), + &apiv2beta1.CreateArtifactRequest{ + RunId: runid1, + TaskId: listTask.UUID, + ProducerKey: "producer-key", + Type: apiv2beta1.IOType_RUNTIME_VALUE_INPUT, + Artifact: &apiv2beta1.Artifact{ + Namespace: "ns1", + Type: apiv2beta1.Artifact_Model, + Uri: strPTR("gs://b/f"), + Name: "a1", + Description: "desc-list", + }, + }, + ) + require.NoError(t, err) + listResp, err := s.ListArtifacts(ctxWithUser(), &apiv2beta1.ListArtifactRequest{ + Namespace: "ns1", + PageSize: 10, + }) + assert.NoError(t, err) + assert.GreaterOrEqual(t, int(listResp.GetTotalSize()), 1) + assert.GreaterOrEqual(t, len(listResp.GetArtifacts()), 1) +} + 
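
Aside for reviewers: the multi-user tests above authenticate by faking the *incoming* user-ID metadata via ctxWithUser. A minimal sketch of the client-side counterpart, assuming the common default header name "kubeflow-userid" (the server resolves the actual name and prefix through common.GetKubeflowUserIDHeader() and common.GetKubeflowUserIDPrefix(), so treat the literal below as an illustration only):

// clientctx.go — hedged illustration, not part of this patch
package main

import (
	"context"
	"fmt"

	"google.golang.org/grpc/metadata"
)

// withUser attaches the identity header as outgoing gRPC metadata; ctxWithUser
// in the tests builds the mirror-image *incoming* context the server would see.
func withUser(ctx context.Context, user string) context.Context {
	return metadata.AppendToOutgoingContext(ctx, "kubeflow-userid", user)
}

func main() {
	ctx := withUser(context.Background(), "test-user@example.com")
	md, _ := metadata.FromOutgoingContext(ctx)
	fmt.Println(md) // map[kubeflow-userid:[test-user@example.com]]
}
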
+func TestArtifactServer_GetArtifact_Errors(t *testing.T) { + clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) + resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) + s := createArtifactServer(resourceManager) + + // Missing ID + _, err := s.GetArtifact(context.Background(), &apiv2beta1.GetArtifactRequest{ArtifactId: ""}) + assert.Equal(t, codes.InvalidArgument, err.(*util.UserError).ExternalStatusCode()) + + // Non-existent + _, err = s.GetArtifact(context.Background(), &apiv2beta1.GetArtifactRequest{ArtifactId: "does-not-exist"}) + assert.Equal(t, codes.NotFound, err.(*util.UserError).ExternalStatusCode()) +} + +func TestArtifactServer_Authorization_MultiUser(t *testing.T) { + // Turn on MU mode by setting viper flag + // Note: IsMultiUserMode() reads from viper, so configure it here + viper.Set(common.MultiUserMode, "true") + defer viper.Set(common.MultiUserMode, "false") + + clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) + resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) + // In MU, Create should preserve namespace; and List with empty namespace should fail + s := createArtifactServer(resourceManager) + + // By default FakeResourceManager authorizes everything in MU, unless namespace is empty + // ListArtifacts with empty namespace should fail in MU + _, err := s.ListArtifacts(ctxWithUser(), &apiv2beta1.ListArtifactRequest{Namespace: ""}) + assert.Equal(t, codes.InvalidArgument, err.(*util.UserError).ExternalStatusCode()) +} + +func TestArtifactServer_SingleUserNamespaceEmpty(t *testing.T) { + // Ensure single-user mode + viper.Set(common.MultiUserMode, "false") + clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) + resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) + s := createArtifactServer(resourceManager) + + // Even if request carries a namespace, in single-user mode it should be cleared/empty in stored artifact + // Create run and task required by CreateArtifact + _, err := clientManager.RunStore().CreateRun(&model.Run{ + UUID: "single-run", + K8SName: "single-run", + DisplayName: "single-run", + StorageState: model.StorageStateAvailable, + Namespace: "ns1", + RunDetails: model.RunDetails{CreatedAtInSec: 1, ScheduledAtInSec: 1, State: model.RuntimeStateRunning}, + }) + assert.NoError(t, err) + singleTask, err := clientManager.TaskStore().CreateTask(&model.Task{ + Namespace: "ns1", + RunUUID: "single-run", + Name: "t-single", + State: 1, + }) + assert.NoError(t, err) + created, err := s.CreateArtifact(context.Background(), &apiv2beta1.CreateArtifactRequest{ + RunId: "single-run", + TaskId: singleTask.UUID, + ProducerKey: "producer-key", + Type: apiv2beta1.IOType_TASK_OUTPUT_INPUT, + Artifact: &apiv2beta1.Artifact{ + Namespace: "ns1", + Type: apiv2beta1.Artifact_Artifact, + Uri: strPTR("u"), + Name: "a", + Description: "single-desc", + }, + }) + assert.NoError(t, err) + assert.Equal(t, "", created.GetNamespace()) + + // Get artifact and verify it matches + fetched, err := s.GetArtifact(context.Background(), &apiv2beta1.GetArtifactRequest{ + ArtifactId: created.GetArtifactId(), + }) + assert.NoError(t, err) + assert.Equal(t, "", fetched.GetNamespace()) + assert.Equal(t, apiv2beta1.Artifact_Artifact, fetched.GetType()) + assert.Equal(t, "u", fetched.GetUri()) + assert.Equal(t, "a", 
fetched.GetName()) + assert.Equal(t, "single-desc", fetched.GetDescription()) +} + +const ( + serverRunID1 = "run-1" + serverRunID2 = "run-2" +) + +// seedArtifactTasks sets up two runs, two tasks, two artifacts and three links. +// Returns server, clientManager, entities. +func seedArtifactTasks(t *testing.T) (*ArtifactServer, *resource.FakeClientManager, *model.Task, *model.Task, *model.Artifact, *model.Artifact) { + viper.Set(common.MultiUserMode, "true") + t.Cleanup(func() { viper.Set(common.MultiUserMode, "false") }) + clientManager := resource.NewFakeClientManagerOrFatalV2() + resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) + s := createArtifactServer(resourceManager) + + // Runs + _, err := clientManager.RunStore().CreateRun(&model.Run{ + UUID: serverRunID1, + ExperimentId: "", + K8SName: "r1", + DisplayName: "r1", + StorageState: model.StorageStateAvailable, + Namespace: "ns1", + RunDetails: model.RunDetails{ + CreatedAtInSec: 1, + ScheduledAtInSec: 1, + State: model.RuntimeStateRunning, + }, + }) + assert.NoError(t, err) + _, err = clientManager.RunStore().CreateRun(&model.Run{ + UUID: serverRunID2, + ExperimentId: "", + K8SName: "r2", + DisplayName: "r2", + StorageState: model.StorageStateAvailable, + Namespace: "ns1", + RunDetails: model.RunDetails{ + CreatedAtInSec: 2, + ScheduledAtInSec: 2, + State: model.RuntimeStateRunning, + }, + }) + require.NoError(t, err) + + // Tasks + t1, err := clientManager.TaskStore().CreateTask(&model.Task{ + Namespace: "ns1", + RunUUID: serverRunID1, + Name: "t1", + State: 1, + }) + assert.NoError(t, err) + t2, err := clientManager.TaskStore().CreateTask(&model.Task{ + Namespace: "ns1", + RunUUID: serverRunID2, + Name: "t2", + State: 1, + }) + assert.NoError(t, err) + + // Artifacts + art1, err := clientManager.ArtifactStore().CreateArtifact(&model.Artifact{ + Namespace: "ns1", + Type: model.ArtifactType(apiv2beta1.Artifact_Artifact), + URI: strPTR("u"), + Name: "a1", + Description: "d1", + }) + assert.NoError(t, err) + art2, err := clientManager.ArtifactStore().CreateArtifact(&model.Artifact{ + Namespace: "ns1", + Type: model.ArtifactType(apiv2beta1.Artifact_Artifact), + URI: strPTR("u2"), + Name: "a2", + Description: "d2", + }) + assert.NoError(t, err) + + // Links + _, err = s.CreateArtifactTask(ctxWithUser(), &apiv2beta1.CreateArtifactTaskRequest{ + ArtifactTask: &apiv2beta1.ArtifactTask{ + ArtifactId: art1.UUID, + TaskId: t1.UUID, + RunId: serverRunID1, + Type: apiv2beta1.IOType_COMPONENT_INPUT, + Producer: &apiv2beta1.IOProducer{ + TaskName: "t1", + }, + Key: "in1", + }, + }) + assert.NoError(t, err) + _, err = s.CreateArtifactTask(ctxWithUser(), &apiv2beta1.CreateArtifactTaskRequest{ + ArtifactTask: &apiv2beta1.ArtifactTask{ + ArtifactId: art2.UUID, + TaskId: t1.UUID, + RunId: serverRunID1, + Type: apiv2beta1.IOType_OUTPUT, + Producer: &apiv2beta1.IOProducer{ + TaskName: "t1", + }, + Key: "out1", + }, + }) + assert.NoError(t, err) + _, err = s.CreateArtifactTask(ctxWithUser(), &apiv2beta1.CreateArtifactTaskRequest{ + ArtifactTask: &apiv2beta1.ArtifactTask{ + ArtifactId: art2.UUID, + TaskId: t2.UUID, + RunId: serverRunID2, + Type: apiv2beta1.IOType_COMPONENT_INPUT, + Producer: &apiv2beta1.IOProducer{ + TaskName: "t2", + }, + Key: "in2", + }, + }) + assert.NoError(t, err) + + return s, clientManager, t1, t2, art1, art2 +} + +func TestArtifactServer_ListArtifactTasks_FilterByTaskIds(t *testing.T) { + s, _, t1, _, _, _ := seedArtifactTasks(t) + resp, err := 
+    assert.NoError(t, err)
+    assert.Equal(t, int32(2), resp.GetTotalSize())
+    assert.Equal(t, 2, len(resp.GetArtifactTasks()))
+    // Ensure key values are present
+    keys := []string{resp.GetArtifactTasks()[0].GetKey(), resp.GetArtifactTasks()[1].GetKey()}
+    assert.Contains(t, keys, "in1")
+    assert.Contains(t, keys, "out1")
+    assert.Empty(t, resp.GetNextPageToken())
+}
+
+func TestArtifactServer_ListArtifactTasks_FilterByArtifactIds(t *testing.T) {
+    s, _, _, _, _, art2 := seedArtifactTasks(t)
+    resp, err := s.ListArtifactTasks(ctxWithUser(), &apiv2beta1.ListArtifactTasksRequest{ArtifactIds: []string{art2.UUID}, PageSize: 50})
+    assert.NoError(t, err)
+    assert.Equal(t, int32(2), resp.GetTotalSize())
+    assert.Equal(t, 2, len(resp.GetArtifactTasks()))
+    // Ensure key values are as expected for art2's links
+    keys := []string{resp.GetArtifactTasks()[0].GetKey(), resp.GetArtifactTasks()[1].GetKey()}
+    assert.Contains(t, keys, "out1")
+    assert.Contains(t, keys, "in2")
+}
+
+func TestArtifactServer_ListArtifactTasks_FilterByRunIds(t *testing.T) {
+    s, _, _, t2, _, art2 := seedArtifactTasks(t)
+    resp, err := s.ListArtifactTasks(ctxWithUser(), &apiv2beta1.ListArtifactTasksRequest{RunIds: []string{serverRunID2}, PageSize: 50})
+    assert.NoError(t, err)
+    assert.Equal(t, int32(1), resp.GetTotalSize())
+    assert.Equal(t, 1, len(resp.GetArtifactTasks()))
+    at := resp.GetArtifactTasks()[0]
+    assert.Equal(t, art2.UUID, at.GetArtifactId())
+    assert.Equal(t, t2.UUID, at.GetTaskId())
+    assert.Equal(t, "in2", at.GetKey())
+}
+
+func TestArtifactServer_ListArtifactTasks_ErrorWhenNoFilters(t *testing.T) {
+    s, _, _, _, _, _ := seedArtifactTasks(t)
+    _, err := s.ListArtifactTasks(ctxWithUser(), &apiv2beta1.ListArtifactTasksRequest{PageSize: 2})
+    assert.Error(t, err)
+}
+
+func TestArtifactServer_ListArtifactTasks_Pagination_TaskIds(t *testing.T) {
+    s, _, t1, _, _, _ := seedArtifactTasks(t)
+    page1, err := s.ListArtifactTasks(ctxWithUser(), &apiv2beta1.ListArtifactTasksRequest{TaskIds: []string{t1.UUID}, PageSize: 1})
+    assert.NoError(t, err)
+    assert.Equal(t, int32(2), page1.GetTotalSize())
+    assert.Equal(t, 1, len(page1.GetArtifactTasks()))
+    assert.NotEmpty(t, page1.GetNextPageToken())
+
+    page2, err := s.ListArtifactTasks(ctxWithUser(), &apiv2beta1.ListArtifactTasksRequest{TaskIds: []string{t1.UUID}, PageToken: page1.GetNextPageToken(), PageSize: 1})
+    assert.NoError(t, err)
+    assert.Equal(t, int32(2), page2.GetTotalSize())
+    assert.Equal(t, 1, len(page2.GetArtifactTasks()))
+    assert.Empty(t, page2.GetNextPageToken())
+
+    id1 := page1.GetArtifactTasks()[0].GetId()
+    id2 := page2.GetArtifactTasks()[0].GetId()
+    assert.NotEqual(t, id1, id2)
+}
+
+func TestArtifactServer_CreateArtifactTasksBulk_Success(t *testing.T) {
+    viper.Set(common.MultiUserMode, "true")
+    defer viper.Set(common.MultiUserMode, "false")
+    clientManager := resource.NewFakeClientManagerOrFatalV2()
+    resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false})
+    s := createArtifactServer(resourceManager)
+
+    // Create a run
+    _, err := clientManager.RunStore().CreateRun(&model.Run{
+        UUID:         serverRunID1,
+        K8SName:      "bulk-run",
+        DisplayName:  "bulk-run",
+        StorageState: model.StorageStateAvailable,
+        Namespace:    "ns1",
+        RunDetails: model.RunDetails{
+            CreatedAtInSec:   1,
+            ScheduledAtInSec: 1,
+            State:            model.RuntimeStateRunning,
+        },
+    })
+    assert.NoError(t, err)
+
+    // Create tasks
+    t1, err := clientManager.TaskStore().CreateTask(&model.Task{
+        Namespace: "ns1",
+        RunUUID:   serverRunID1,
+        Name:      "task1",
+        State:     1,
+    })
+    assert.NoError(t, err)
+
+    t2, err := clientManager.TaskStore().CreateTask(&model.Task{
+        Namespace: "ns1",
+        RunUUID:   serverRunID1,
+        Name:      "task2",
+        State:     1,
+    })
+    assert.NoError(t, err)
+
+    // Create artifacts
+    art1, err := clientManager.ArtifactStore().CreateArtifact(&model.Artifact{
+        Namespace: "ns1",
+        Type:      model.ArtifactType(apiv2beta1.Artifact_Artifact),
+        URI:       strPTR("uri1"),
+        Name:      "artifact1",
+    })
+    assert.NoError(t, err)
+
+    art2, err := clientManager.ArtifactStore().CreateArtifact(&model.Artifact{
+        Namespace: "ns1",
+        Type:      model.ArtifactType(apiv2beta1.Artifact_Artifact),
+        URI:       strPTR("uri2"),
+        Name:      "artifact2",
+    })
+    assert.NoError(t, err)
+
+    // Create bulk artifact tasks
+    req := &apiv2beta1.CreateArtifactTasksBulkRequest{
+        ArtifactTasks: []*apiv2beta1.ArtifactTask{
+            {
+                ArtifactId: art1.UUID,
+                TaskId:     t1.UUID,
+                RunId:      serverRunID1,
+                Type:       apiv2beta1.IOType_COMPONENT_INPUT,
+                Producer: &apiv2beta1.IOProducer{
+                    TaskName: "task1",
+                },
+                Key: "input1",
+            },
+            {
+                ArtifactId: art2.UUID,
+                TaskId:     t1.UUID,
+                RunId:      serverRunID1,
+                Type:       apiv2beta1.IOType_OUTPUT,
+                Producer: &apiv2beta1.IOProducer{
+                    TaskName: "task1",
+                },
+                Key: "output1",
+            },
+            {
+                ArtifactId: art2.UUID,
+                TaskId:     t2.UUID,
+                RunId:      serverRunID1,
+                Type:       apiv2beta1.IOType_COMPONENT_INPUT,
+                Producer: &apiv2beta1.IOProducer{
+                    TaskName: "task2",
+                },
+                Key: "input2",
+            },
+        },
+    }
+
+    resp, err := s.CreateArtifactTasksBulk(ctxWithUser(), req)
+    assert.NoError(t, err)
+    assert.NotNil(t, resp)
+    assert.Equal(t, 3, len(resp.GetArtifactTasks()))
+
+    // Verify all artifact tasks were created
+    for i, at := range resp.GetArtifactTasks() {
+        assert.NotEmpty(t, at.GetId())
+        assert.Equal(t, req.ArtifactTasks[i].GetArtifactId(), at.GetArtifactId())
+        assert.Equal(t, req.ArtifactTasks[i].GetTaskId(), at.GetTaskId())
+        assert.Equal(t, req.ArtifactTasks[i].GetRunId(), at.GetRunId())
+        assert.Equal(t, req.ArtifactTasks[i].GetType(), at.GetType())
+        assert.Equal(t, req.ArtifactTasks[i].GetKey(), at.GetKey())
+        assert.Equal(t, req.ArtifactTasks[i].GetProducer().GetTaskName(), at.GetProducer().GetTaskName())
+    }
+
+    // Verify they can be listed
+    listResp, err := s.ListArtifactTasks(ctxWithUser(), &apiv2beta1.ListArtifactTasksRequest{
+        TaskIds:  []string{t1.UUID},
+        PageSize: 10,
+    })
+    assert.NoError(t, err)
+    assert.Equal(t, int32(2), listResp.GetTotalSize())
+}
+
+func TestArtifactServer_CreateArtifactTasksBulk_EmptyRequest(t *testing.T) {
+    viper.Set(common.MultiUserMode, "true")
+    defer viper.Set(common.MultiUserMode, "false")
+    clientManager := resource.NewFakeClientManagerOrFatalV2()
+    resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false})
+    s := createArtifactServer(resourceManager)
+
+    // An empty request should fail with a validation error
+    _, err := s.CreateArtifactTasksBulk(ctxWithUser(), &apiv2beta1.CreateArtifactTasksBulkRequest{
+        ArtifactTasks: []*apiv2beta1.ArtifactTask{},
+    })
+    assert.Error(t, err)
+    assert.Equal(t, codes.InvalidArgument, err.(*util.UserError).ExternalStatusCode())
+}
+
+func TestArtifactServer_CreateArtifactTasksBulk_ValidationError(t *testing.T) {
+    viper.Set(common.MultiUserMode, "true")
+    defer viper.Set(common.MultiUserMode, "false")
+    clientManager := resource.NewFakeClientManagerOrFatalV2()
+    resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false})
+    s := createArtifactServer(resourceManager)
+
+    // Request with an invalid artifact task (missing required fields)
+    req := &apiv2beta1.CreateArtifactTasksBulkRequest{
+        ArtifactTasks: []*apiv2beta1.ArtifactTask{
+            {
+                ArtifactId: "art1",
+                // Missing TaskId, RunId, Type
+            },
+        },
+    }
+
+    _, err := s.CreateArtifactTasksBulk(ctxWithUser(), req)
+    assert.Error(t, err)
+    assert.Equal(t, codes.InvalidArgument, err.(*util.UserError).ExternalStatusCode())
+}
+
+func TestArtifactServer_CreateArtifactsBulk_Success(t *testing.T) {
+    viper.Set(common.MultiUserMode, "true")
+    defer viper.Set(common.MultiUserMode, "false")
+    clientManager := resource.NewFakeClientManagerOrFatalV2()
+    resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false})
+    s := createArtifactServer(resourceManager)
+
+    // Create run
+    _, err := clientManager.RunStore().CreateRun(&model.Run{
+        UUID:         runid1,
+        K8SName:      "bulk-run",
+        DisplayName:  "bulk-run",
+        StorageState: model.StorageStateAvailable,
+        Namespace:    "ns1",
+        RunDetails: model.RunDetails{
+            CreatedAtInSec:   1,
+            ScheduledAtInSec: 1,
+            State:            model.RuntimeStateRunning,
+        },
+    })
+    assert.NoError(t, err)
+
+    // Create three tasks for the run
+    task1, err := clientManager.TaskStore().CreateTask(&model.Task{
+        Namespace: "ns1",
+        RunUUID:   runid1,
+        Name:      "task1",
+        State:     1,
+    })
+    assert.NoError(t, err)
+
+    task2, err := clientManager.TaskStore().CreateTask(&model.Task{
+        Namespace: "ns1",
+        RunUUID:   runid1,
+        Name:      "task2",
+        State:     1,
+    })
+    assert.NoError(t, err)
+
+    task3, err := clientManager.TaskStore().CreateTask(&model.Task{
+        Namespace: "ns1",
+        RunUUID:   runid1,
+        Name:      "task3",
+        State:     1,
+    })
+    assert.NoError(t, err)
+
+    // Create multiple artifacts in bulk
+    req := &apiv2beta1.CreateArtifactsBulkRequest{
+        Artifacts: []*apiv2beta1.CreateArtifactRequest{
+            {
+                RunId:       runid1,
+                TaskId:      task1.UUID,
+                ProducerKey: "output1",
+                Type:        apiv2beta1.IOType_RUNTIME_VALUE_INPUT,
+                Artifact: &apiv2beta1.Artifact{
+                    Namespace:   "ns1",
+                    Type:        apiv2beta1.Artifact_Model,
+                    Uri:         strPTR("gs://bucket/model1"),
+                    Name:        "model1",
+                    Description: "First model",
+                },
+            },
+            {
+                RunId:       runid1,
+                TaskId:      task2.UUID,
+                ProducerKey: "output2",
+                Type:        apiv2beta1.IOType_RUNTIME_VALUE_INPUT,
+                Artifact: &apiv2beta1.Artifact{
+                    Namespace:   "ns1",
+                    Type:        apiv2beta1.Artifact_Dataset,
+                    Uri:         strPTR("gs://bucket/dataset1"),
+                    Name:        "dataset1",
+                    Description: "First dataset",
+                },
+            },
+            {
+                RunId:       runid1,
+                TaskId:      task3.UUID,
+                ProducerKey: "output3",
+                Type:        apiv2beta1.IOType_RUNTIME_VALUE_INPUT,
+                Artifact: &apiv2beta1.Artifact{
+                    Namespace:   "ns1",
+                    Type:        apiv2beta1.Artifact_Metric,
+                    Uri:         strPTR("gs://bucket/metrics1"),
+                    Name:        "metrics1",
+                    Description: "First metrics",
+                    NumberValue: func() *float64 { v := 0.95; return &v }(),
+                },
+            },
+        },
+    }
+
+    resp, err := s.CreateArtifactsBulk(ctxWithUser(), req)
+    assert.NoError(t, err)
+    assert.NotNil(t, resp)
+    assert.Equal(t, 3, len(resp.GetArtifacts()))
+
+    // Verify all artifacts were created correctly
+    artifact1 := resp.GetArtifacts()[0]
+    assert.NotEmpty(t, artifact1.GetArtifactId())
+    assert.Equal(t, "ns1", artifact1.GetNamespace())
+    assert.Equal(t, apiv2beta1.Artifact_Model, artifact1.GetType())
+    assert.Equal(t, "gs://bucket/model1", artifact1.GetUri())
+    assert.Equal(t, "model1", artifact1.GetName())
+    assert.Equal(t, "First model", artifact1.GetDescription())
+
+    artifact2 := resp.GetArtifacts()[1]
+    assert.NotEmpty(t, artifact2.GetArtifactId())
+    assert.Equal(t, "ns1", artifact2.GetNamespace())
+    assert.Equal(t, apiv2beta1.Artifact_Dataset, artifact2.GetType())
+    assert.Equal(t, "gs://bucket/dataset1", artifact2.GetUri())
+    assert.Equal(t, "dataset1", artifact2.GetName())
+
+    artifact3 := resp.GetArtifacts()[2]
+    assert.NotEmpty(t, artifact3.GetArtifactId())
+    assert.Equal(t, "ns1", artifact3.GetNamespace())
+    assert.Equal(t, apiv2beta1.Artifact_Metric, artifact3.GetType())
+    assert.Equal(t, "gs://bucket/metrics1", artifact3.GetUri())
+    assert.Equal(t, "metrics1", artifact3.GetName())
+
+    // Verify an artifact-task relationship was created for each artifact
+    for i, artifact := range resp.GetArtifacts() {
+        artifactTasks, err := s.ListArtifactTasks(ctxWithUser(), &apiv2beta1.ListArtifactTasksRequest{
+            ArtifactIds: []string{artifact.GetArtifactId()},
+            PageSize:    10,
+        })
+        assert.NoError(t, err)
+        assert.Equal(t, int32(1), artifactTasks.GetTotalSize())
+        assert.Equal(t, 1, len(artifactTasks.GetArtifactTasks()))
+
+        at := artifactTasks.GetArtifactTasks()[0]
+        assert.Equal(t, artifact.GetArtifactId(), at.GetArtifactId())
+        assert.Equal(t, apiv2beta1.IOType_OUTPUT, at.GetType())
+        assert.Equal(t, req.Artifacts[i].ProducerKey, at.GetKey())
+    }
+
+    // Verify the artifacts can be listed
+    listResp, err := s.ListArtifacts(ctxWithUser(), &apiv2beta1.ListArtifactRequest{
+        Namespace: "ns1",
+        PageSize:  10,
+    })
+    assert.NoError(t, err)
+    assert.GreaterOrEqual(t, int(listResp.GetTotalSize()), 3)
+}
+
+func TestArtifactServer_CreateArtifactsBulk_WithIterationIndex(t *testing.T) {
+    viper.Set(common.MultiUserMode, "true")
+    defer viper.Set(common.MultiUserMode, "false")
+    clientManager := resource.NewFakeClientManagerOrFatalV2()
+    resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false})
+    s := createArtifactServer(resourceManager)
+
+    // Create run
+    _, err := clientManager.RunStore().CreateRun(&model.Run{
+        UUID:         runid1,
+        K8SName:      "iteration-bulk-run",
+        DisplayName:  "iteration-bulk-run",
+        StorageState: model.StorageStateAvailable,
+        Namespace:    "ns1",
+        RunDetails: model.RunDetails{
+            CreatedAtInSec:   1,
+            ScheduledAtInSec: 1,
+            State:            model.RuntimeStateRunning,
+        },
+    })
+    assert.NoError(t, err)
+
+    // Create a task for the run
+    task, err := clientManager.TaskStore().CreateTask(&model.Task{
+        Namespace: "ns1",
+        RunUUID:   runid1,
+        Name:      "iteration-task",
+        State:     1,
+    })
+    assert.NoError(t, err)
+
+    // Create artifacts with iteration indices
+    iter0 := int64(0)
+    iter1 := int64(1)
+    iter2 := int64(2)
+
+    req := &apiv2beta1.CreateArtifactsBulkRequest{
+        Artifacts: []*apiv2beta1.CreateArtifactRequest{
+            {
+                RunId:          runid1,
+                TaskId:         task.UUID,
+                ProducerKey:    "iteration-output",
+                IterationIndex: &iter0,
+                Type:           apiv2beta1.IOType_RUNTIME_VALUE_INPUT,
+                Artifact: &apiv2beta1.Artifact{
+                    Namespace:   "ns1",
+                    Type:        apiv2beta1.Artifact_Dataset,
+                    Uri:         strPTR("gs://bucket/iter-0"),
+                    Name:        "dataset-iter-0",
+                    Description: "Dataset from iteration 0",
+                },
+            },
+            {
+                RunId:          runid1,
+                TaskId:         task.UUID,
+                ProducerKey:    "iteration-output",
+                IterationIndex: &iter1,
+                Type:           apiv2beta1.IOType_RUNTIME_VALUE_INPUT,
+                Artifact: &apiv2beta1.Artifact{
+                    Namespace:   "ns1",
+                    Type:        apiv2beta1.Artifact_Dataset,
+                    Uri:         strPTR("gs://bucket/iter-1"),
+                    Name:        "dataset-iter-1",
+                    Description: "Dataset from iteration 1",
+                },
+            },
+            {
+                RunId:          runid1,
+                TaskId:         task.UUID,
+                ProducerKey:    "iteration-output",
+                IterationIndex: &iter2,
+                Type:           apiv2beta1.IOType_RUNTIME_VALUE_INPUT,
+                Artifact: &apiv2beta1.Artifact{
+                    Namespace:   "ns1",
+                    Type:        apiv2beta1.Artifact_Dataset,
+                    Uri:         strPTR("gs://bucket/iter-2"),
+                    Name:        "dataset-iter-2",
+                    Description: "Dataset from iteration 2",
+                },
+            },
+        },
+    }
+
+    resp, err := s.CreateArtifactsBulk(ctxWithUser(), req)
+    assert.NoError(t, err)
+    assert.NotNil(t, resp)
+    assert.Equal(t, 3, len(resp.GetArtifacts()))
+
+    // Verify all artifacts were created with the correct iteration indices
+    for i, artifact := range resp.GetArtifacts() {
+        artifactTasks, err := s.ListArtifactTasks(ctxWithUser(), &apiv2beta1.ListArtifactTasksRequest{
+            ArtifactIds: []string{artifact.GetArtifactId()},
+            PageSize:    10,
+        })
+        assert.NoError(t, err)
+        assert.Equal(t, int32(1), artifactTasks.GetTotalSize())
+
+        at := artifactTasks.GetArtifactTasks()[0]
+        assert.NotNil(t, at.GetProducer())
+        assert.NotNil(t, at.GetProducer().Iteration)
+        assert.Equal(t, int64(i), *at.GetProducer().Iteration)
+    }
+}
+
+func TestArtifactServer_CreateArtifactsBulk_EmptyRequest(t *testing.T) {
+    viper.Set(common.MultiUserMode, "true")
+    defer viper.Set(common.MultiUserMode, "false")
+    clientManager := resource.NewFakeClientManagerOrFatalV2()
+    resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false})
+    s := createArtifactServer(resourceManager)
+
+    // A nil request should fail
+    _, err := s.CreateArtifactsBulk(ctxWithUser(), nil)
+    assert.Error(t, err)
+    assert.Equal(t, codes.InvalidArgument, err.(*util.UserError).ExternalStatusCode())
+    assert.Contains(t, err.Error(), "must contain at least one artifact")
+
+    // An empty artifacts list should fail
+    _, err = s.CreateArtifactsBulk(ctxWithUser(), &apiv2beta1.CreateArtifactsBulkRequest{
+        Artifacts: []*apiv2beta1.CreateArtifactRequest{},
+    })
+    assert.Error(t, err)
+    assert.Equal(t, codes.InvalidArgument, err.(*util.UserError).ExternalStatusCode())
+    assert.Contains(t, err.Error(), "must contain at least one artifact")
+}
+
+func TestArtifactServer_CreateArtifactsBulk_ValidationErrors(t *testing.T) {
+    viper.Set(common.MultiUserMode, "true")
+    defer viper.Set(common.MultiUserMode, "false")
+    clientManager := resource.NewFakeClientManagerOrFatalV2()
+    resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false})
+    s := createArtifactServer(resourceManager)
+
+    // Create run
+    _, err := clientManager.RunStore().CreateRun(&model.Run{
+        UUID:         runid1,
+        K8SName:      "validation-run",
+        DisplayName:  "validation-run",
+        StorageState: model.StorageStateAvailable,
+        Namespace:    "ns1",
+        RunDetails: model.RunDetails{
+            CreatedAtInSec:   1,
+            ScheduledAtInSec: 1,
+            State:            model.RuntimeStateRunning,
+        },
+    })
+    assert.NoError(t, err)
+
+    task, err := clientManager.TaskStore().CreateTask(&model.Task{
+        Namespace: "ns1",
+        RunUUID:   runid1,
+        Name:      "validation-task",
+        State:     1,
+    })
+    assert.NoError(t, err)
+
+    // Test with a missing artifact
+    _, err = s.CreateArtifactsBulk(ctxWithUser(), &apiv2beta1.CreateArtifactsBulkRequest{
+        Artifacts: []*apiv2beta1.CreateArtifactRequest{
+            {
+                RunId:       runid1,
+                TaskId:      task.UUID,
+                ProducerKey: "output",
+                Type:        apiv2beta1.IOType_RUNTIME_VALUE_INPUT,
+                Artifact:    nil, // Missing artifact!
+            },
+        },
+    })
+    assert.Error(t, err)
+    assert.Equal(t, codes.InvalidArgument, err.(*util.UserError).ExternalStatusCode())
+    assert.Contains(t, err.Error(), "Artifact is required")
+
+    // Test with a missing namespace
+    _, err = s.CreateArtifactsBulk(ctxWithUser(), &apiv2beta1.CreateArtifactsBulkRequest{
+        Artifacts: []*apiv2beta1.CreateArtifactRequest{
+            {
+                RunId:       runid1,
+                TaskId:      task.UUID,
+                ProducerKey: "output",
+                Type:        apiv2beta1.IOType_RUNTIME_VALUE_INPUT,
+                Artifact: &apiv2beta1.Artifact{
+                    Namespace: "", // Missing namespace!
+                    Type:      apiv2beta1.Artifact_Model,
+                    Name:      "test",
+                },
+            },
+        },
+    })
+    assert.Error(t, err)
+    assert.Equal(t, codes.InvalidArgument, err.(*util.UserError).ExternalStatusCode())
+    assert.Contains(t, err.Error(), "namespace is required")
+
+    // Test with the wrong run ID
+    otherTask, err := clientManager.TaskStore().CreateTask(&model.Task{
+        Namespace: "ns1",
+        RunUUID:   "other-run-id",
+        Name:      "other-task",
+        State:     1,
+    })
+    assert.NoError(t, err)
+
+    _, err = s.CreateArtifactsBulk(ctxWithUser(), &apiv2beta1.CreateArtifactsBulkRequest{
+        Artifacts: []*apiv2beta1.CreateArtifactRequest{
+            {
+                RunId:       runid1,
+                TaskId:      otherTask.UUID, // Task belongs to a different run!
+                ProducerKey: "output",
+                Type:        apiv2beta1.IOType_RUNTIME_VALUE_INPUT,
+                Artifact: &apiv2beta1.Artifact{
+                    Namespace: "ns1",
+                    Type:      apiv2beta1.Artifact_Model,
+                    Name:      "test",
+                },
+            },
+        },
+    })
+    assert.Error(t, err)
+    assert.Equal(t, codes.InvalidArgument, err.(*util.UserError).ExternalStatusCode())
+    assert.Contains(t, err.Error(), "does not belong to this Run ID")
+}
diff --git a/backend/src/apiserver/server/experiment_server_test.go b/backend/src/apiserver/server/experiment_server_test.go
index aef0e190224..33465ef337c 100644
--- a/backend/src/apiserver/server/experiment_server_test.go
+++ b/backend/src/apiserver/server/experiment_server_test.go
@@ -835,12 +835,12 @@ func TestListExperiments(t *testing.T) {
 
 func TestListExperimentsByLastRunCreation(t *testing.T) {
     // Create experiment and runs/jobs under it.
-    clients, manager, experiment1, _ := initWithExperimentAndPipelineVersion(t)
+    clients, _, experiment1, _ := initWithExperimentAndPipelineVersion(t)
     defer clients.Close()
 
     // Create another experiment
     clients.UpdateUUID(util.NewFakeUUIDGeneratorOrFatal(DefaultFakeIdTwo, nil))
-    manager = resource.NewResourceManager(clients, &resource.ResourceManagerOptions{CollectMetrics: false})
+    manager := resource.NewResourceManager(clients, &resource.ResourceManagerOptions{CollectMetrics: false})
     server := createExperimentServer(manager)
     experiment := &apiV2beta1.Experiment{DisplayName: "exp2"}
     experiment2, err := server.CreateExperiment(nil, &apiV2beta1.CreateExperimentRequest{Experiment: experiment})
@@ -910,6 +910,7 @@ func TestListExperimentsByLastRunCreation(t *testing.T) {
     listExperimentsRequest = &apiV2beta1.ListExperimentsRequest{SortBy: "last_run_created_at desc"}
     result, err = experimentServer.ListExperiments(nil, listExperimentsRequest)
     assert.Equal(t, []*apiV2beta1.Experiment{expected2, expected1}, result.Experiments)
+    assert.NoError(t, err)
 }
 
 func TestListExperimentsV1_Failed(t *testing.T) {
diff --git a/backend/src/apiserver/server/fakes_test.go b/backend/src/apiserver/server/fakes_test.go
index 0a699127ae9..473a535fc6a 100644
--- a/backend/src/apiserver/server/fakes_test.go
+++ b/backend/src/apiserver/server/fakes_test.go
@@ -349,6 +349,73 @@ func initWithOneTimeRun(t *testing.T) (*resource.FakeClientManager, *resource.Re
     return clientManager, manager, runDetail
 }
 
+// initWithOneTimeRunV2 is the same as initWithOneTimeRun, but uses the V2 fake client manager,
+// which generates unique UUIDs to avoid unique-constraint violations in tests that create
+// multiple entities.
+func initWithOneTimeRunV2(t *testing.T) (*resource.FakeClientManager, *resource.ResourceManager, *model.Run) {
+    initEnvVars()
+    clientManager := resource.NewFakeClientManagerOrFatalV2()
+    resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false})
+
+    // Create an experiment whose shape depends on multi-user mode
+    var apiExperiment *apiv1beta1.Experiment
+    if common.IsMultiUserMode() {
+        apiExperiment = &apiv1beta1.Experiment{
+            Name: "exp1",
+            ResourceReferences: []*apiv1beta1.ResourceReference{
+                {
+                    Key:          &apiv1beta1.ResourceKey{Type: apiv1beta1.ResourceType_NAMESPACE, Id: "ns1"},
+                    Relationship: apiv1beta1.Relationship_OWNER,
+                },
+            },
+        }
+    } else {
+        apiExperiment = &apiv1beta1.Experiment{
+            Name: "exp1",
+            ResourceReferences: []*apiv1beta1.ResourceReference{
+                {
+                    Key:          &apiv1beta1.ResourceKey{Type: apiv1beta1.ResourceType_NAMESPACE, Id: ""},
+                    Relationship: apiv1beta1.Relationship_OWNER,
+                },
+            },
+        }
+    }
+    modelExperiment, err := toModelExperiment(apiExperiment)
+    assert.Nil(t, err)
+    exp, err := resourceManager.CreateExperiment(modelExperiment)
+    assert.Nil(t, err)
+
+    ctx := context.Background()
+    if common.IsMultiUserMode() {
+        md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: common.GoogleIAPUserIdentityPrefix + "user@google.com"})
+        ctx = metadata.NewIncomingContext(context.Background(), md)
+    }
+    apiRun := &apiv1beta1.Run{
+        Name: "run1",
+        PipelineSpec: &apiv1beta1.PipelineSpec{
+            WorkflowManifest: testWorkflow.ToStringForStore(),
+            Parameters: []*apiv1beta1.Parameter{
+                {Name: "param1", Value: "world"},
+            },
+        },
+        ResourceReferences: []*apiv1beta1.ResourceReference{
+            {
+                Key:          &apiv1beta1.ResourceKey{Type: apiv1beta1.ResourceType_EXPERIMENT, Id: exp.UUID},
+                Relationship: apiv1beta1.Relationship_OWNER,
+            },
+            {
+                Key:          &apiv1beta1.ResourceKey{Type: apiv1beta1.ResourceType_NAMESPACE, Id: exp.Namespace},
+                Relationship: apiv1beta1.Relationship_OWNER,
+            },
+        },
+    }
+    modelRun, err := toModelRun(apiRun)
+    assert.Nil(t, err)
+    runDetail, err := resourceManager.CreateRun(ctx, modelRun)
+    assert.Nil(t, err)
+    return clientManager, resourceManager, runDetail
+}
+
 func AssertUserError(t *testing.T, err error, expectedCode codes.Code) {
     userError, ok := err.(*util.UserError)
     assert.True(t, ok)
diff --git a/backend/src/apiserver/server/list_request_util.go b/backend/src/apiserver/server/list_request_util.go
index ddac2680979..940abe29a19 100644
--- a/backend/src/apiserver/server/list_request_util.go
+++ b/backend/src/apiserver/server/list_request_util.go
@@ -232,3 +232,63 @@ func transformJSONForBackwardCompatibility(jsonStr string) (string, error) {
     )
     return replacer.Replace(jsonStr), nil
 }
+
+// validateFilterV2Beta1Artifact creates a filter context for artifacts based on the namespace.
+func validateFilterV2Beta1Artifact(namespace string) (*model.FilterContext, error) {
+    filterContext := &model.FilterContext{}
+    if namespace != "" {
+        filterContext.ReferenceKey = &model.ReferenceKey{
+            Type: model.NamespaceResourceType,
+            ID:   namespace,
+        }
+    }
+    return filterContext, nil
+}
+
+// validateFilterV2Beta1ArtifactTask creates filter contexts for artifact-task relationships.
+func validateFilterV2Beta1ArtifactTask(taskIds, runIds, artifactIds []string) ([]*model.FilterContext, error) {
+    var filterContexts []*model.FilterContext
+
+    // Add task ID filters
+    for _, taskID := range taskIds {
+        if taskID != "" {
+            filterContexts = append(filterContexts, &model.FilterContext{
+                ReferenceKey: &model.ReferenceKey{
+                    Type: model.TaskResourceType,
+                    ID:   taskID,
+                },
+            })
+        }
+    }
+
+    // Add run ID filters
+    for _, runID := range runIds {
+        if runID != "" {
+            filterContexts = append(filterContexts, &model.FilterContext{
+                ReferenceKey: &model.ReferenceKey{
+                    Type: model.RunResourceType,
+                    ID:   runID,
+                },
+            })
+        }
+    }
+
+    // Add artifact ID filters
+    for _, artifactID := range artifactIds {
+        if artifactID != "" {
+            filterContexts = append(filterContexts, &model.FilterContext{
+                ReferenceKey: &model.ReferenceKey{
+                    Type: model.ArtifactResourceType,
+                    ID:   artifactID,
+                },
+            })
+        }
+    }
+
+    // If no filters were specified, return a single empty filter context
+    if len(filterContexts) == 0 {
+        filterContexts = append(filterContexts, &model.FilterContext{})
+    }
+
+    return filterContexts, nil
+}
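+
+// Illustrative example: a call such as
+//   validateFilterV2Beta1ArtifactTask([]string{"t1"}, []string{"r1"}, nil)
+// yields one filter context per non-empty ID:
+//   {ReferenceKey: {Type: TaskResourceType, ID: "t1"}}
+//   {ReferenceKey: {Type: RunResourceType, ID: "r1"}}
+// The caller presumably runs one query per context and merges the results.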
diff --git a/backend/src/apiserver/server/report_server.go b/backend/src/apiserver/server/report_server.go
index fcda727f280..36943e27eb5 100644
--- a/backend/src/apiserver/server/report_server.go
+++ b/backend/src/apiserver/server/report_server.go
@@ -25,7 +25,6 @@ import (
 
     apiv1beta1 "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client"
     apiv2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client"
-    "github.com/kubeflow/pipelines/backend/src/apiserver/model"
     "github.com/kubeflow/pipelines/backend/src/apiserver/resource"
     "github.com/kubeflow/pipelines/backend/src/common/util"
     scheduledworkflow "github.com/kubeflow/pipelines/backend/src/crd/pkg/apis/scheduledworkflow/v1beta1"
@@ -48,18 +47,6 @@ type ReportServerV1 struct {
     apiv1beta1.UnimplementedReportServiceServer
 }
 
-// Extracts task details from an execution spec and reports them to storage.
-func (s *BaseReportServer) reportTasksFromExecution(execSpec util.ExecutionSpec, runId string) ([]*model.Task, error) {
-    if !execSpec.ExecutionStatus().HasNodes() {
-        return nil, nil
-    }
-    tasks, err := toModelTasks(execSpec)
-    if err != nil {
-        return nil, util.Wrap(err, "Failed to report tasks of an execution")
-    }
-    return s.resourceManager.CreateOrUpdateTasks(tasks)
-}
-
 // Reports a workflow.
 func (s *BaseReportServer) reportWorkflow(ctx context.Context, workflow string) (*emptypb.Empty, error) {
     execSpec, err := validateReportWorkflowRequest(workflow)
@@ -77,16 +64,11 @@ func (s *BaseReportServer) reportWorkflow(ctx context.Context, workflow string)
         return nil, err
     }
 
-    newExecSpec, err := s.resourceManager.ReportWorkflowResource(ctx, *execSpec)
+    _, err = s.resourceManager.ReportWorkflowResource(ctx, *execSpec)
     if err != nil {
         return nil, util.Wrap(err, "Failed to report workflow")
     }
 
-    runId := newExecSpec.ExecutionObjectMeta().Labels[util.LabelKeyWorkflowRunId]
-    _, err = s.reportTasksFromExecution(newExecSpec, runId)
-    if err != nil {
-        return nil, util.Wrap(err, "Failed to report task details")
-    }
     return &emptypb.Empty{}, nil
 }
diff --git a/backend/src/apiserver/server/run_server.go b/backend/src/apiserver/server/run_server.go
index 3fa17d3de1a..ab63ddd776e 100644
--- a/backend/src/apiserver/server/run_server.go
+++ b/backend/src/apiserver/server/run_server.go
@@ -17,9 +17,10 @@ package server
 
 import (
     "context"
 
+    "github.com/golang/glog"
+    "google.golang.org/grpc/codes"
     "google.golang.org/protobuf/types/known/emptypb"
 
-    "github.com/golang/glog"
     apiv1beta1 "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client"
     apiv2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client"
     "github.com/kubeflow/pipelines/backend/src/apiserver/common"
@@ -29,7 +30,6 @@ import (
     "github.com/kubeflow/pipelines/backend/src/common/util"
     "github.com/prometheus/client_golang/prometheus"
     "github.com/prometheus/client_golang/prometheus/promauto"
-    "google.golang.org/grpc/codes"
     authorizationv1 "k8s.io/api/authorization/v1"
 )
 
@@ -185,15 +185,7 @@ func (s *RunServerV1) CreateRunV1(ctx context.Context, request *apiv1beta1.Creat
 // Fetches a run.
 // Applies common logic on v1beta1 and v2beta1 API.
 func (s *BaseRunServer) getRun(ctx context.Context, runId string) (*model.Run, error) {
-    err := s.canAccessRun(ctx, runId, &authorizationv1.ResourceAttributes{Verb: common.RbacResourceVerbGet})
-    if err != nil {
-        return nil, util.Wrap(err, "Failed to authorize the request")
-    }
-    run, err := s.resourceManager.GetRun(runId)
-    if err != nil {
-        return nil, err
-    }
-    return run, nil
+    return s.getRunWithHydration(ctx, runId, false)
 }
 
 // Fetches a run.
@@ -214,11 +206,15 @@ func (s *RunServerV1) GetRunV1(ctx context.Context, request *apiv1beta1.GetRunRe
 
 // Fetches all runs that conform to the specified filter and listing options.
 // Applies common logic on v1beta1 and v2beta1 API.
 func (s *BaseRunServer) listRuns(ctx context.Context, pageToken string, pageSize int, sortBy string, opts *list.Options, namespace string, experimentId string) ([]*model.Run, int, string, error) {
+    return s.listRunsWithHydration(ctx, pageToken, pageSize, sortBy, opts, namespace, experimentId, true)
+}
+
+func (s *BaseRunServer) listRunsWithHydration(ctx context.Context, pageToken string, pageSize int, sortBy string, opts *list.Options, namespace string, experimentID string, hydrateTasks bool) ([]*model.Run, int, string, error) {
     namespace = s.resourceManager.ReplaceNamespace(namespace)
-    if experimentId != "" {
-        ns, err := s.resourceManager.GetNamespaceFromExperimentId(experimentId)
+    if experimentID != "" {
+        ns, err := s.resourceManager.GetNamespaceFromExperimentId(experimentID)
         if err != nil {
-            return nil, 0, "", util.Wrapf(err, "Failed to list runs due to error fetching namespace for experiment %s. Try filtering based on namespace", experimentId)
+            return nil, 0, "", util.Wrapf(err, "Failed to list runs due to error fetching namespace for experiment %s. Try filtering based on namespace", experimentID)
         }
         namespace = ns
     }
@@ -234,15 +230,15 @@ func (s *BaseRunServer) listRuns(ctx context.Context, pageToken string, pageSize
     filterContext := &model.FilterContext{
         ReferenceKey: &model.ReferenceKey{Type: model.NamespaceResourceType, ID: namespace},
     }
-    if experimentId != "" {
-        if err := s.resourceManager.CheckExperimentBelongsToNamespace(experimentId, namespace); err != nil {
+    if experimentID != "" {
+        if err := s.resourceManager.CheckExperimentBelongsToNamespace(experimentID, namespace); err != nil {
             return nil, 0, "", util.Wrap(err, "Failed to list runs due to namespace mismatch")
         }
         filterContext = &model.FilterContext{
-            ReferenceKey: &model.ReferenceKey{Type: model.ExperimentResourceType, ID: experimentId},
+            ReferenceKey: &model.ReferenceKey{Type: model.ExperimentResourceType, ID: experimentID},
         }
     }
-    runs, totalSize, token, err := s.resourceManager.ListRuns(filterContext, opts)
+    runs, totalSize, token, err := s.resourceManager.ListRunsWithHydration(filterContext, opts, hydrateTasks)
     if err != nil {
         return nil, 0, "", err
     }
@@ -367,22 +363,22 @@ func (s *RunServerV1) DeleteRunV1(ctx context.Context, request *apiv1beta1.Delet
 
 // Reports run metrics.
 // Applies common logic on v1beta1 and v2beta1 API.
-func (s *BaseRunServer) reportRunMetrics(ctx context.Context, metrics []*model.RunMetric, runId string) ([]map[string]string, error) {
-    err := s.canAccessRun(ctx, runId, &authorizationv1.ResourceAttributes{Verb: common.RbacResourceVerbReportMetrics})
+func (s *BaseRunServer) reportRunMetricsV1(ctx context.Context, metrics []*model.RunMetricV1, runID string) ([]map[string]string, error) {
+    err := s.canAccessRun(ctx, runID, &authorizationv1.ResourceAttributes{Verb: common.RbacResourceVerbReportMetrics})
     if err != nil {
         return nil, util.Wrap(err, "Failed to authorize the request")
     }
     // Verify that the run exists for single-user mode.
     // Multi-user mode will verify this when checking authorization above.
     if !common.IsMultiUserMode() {
-        if _, err := s.resourceManager.GetRun(runId); err != nil {
+        if _, err := s.resourceManager.GetRun(runID); err != nil {
             return nil, util.Wrap(err, "Failed to fetch the requested run")
         }
     }
     results := make([]map[string]string, 0)
     for _, metric := range metrics {
         temp := map[string]string{"Name": metric.Name, "NodeId": metric.NodeID, "ErrorCode": "", "ErrorMessage": ""}
-        if err := validateRunMetric(metric); err != nil {
+        if err := validateRunMetricV1(metric); err != nil {
             temp["ErrorCode"] = "invalid"
             results = append(results, temp)
             continue
@@ -416,8 +412,8 @@ func (s *BaseRunServer) reportRunMetrics(ctx context.Context, metrics []*model.R
     return results, nil
 }
 
-// Reports run metrics.
-// Supports v1beta1 API.
+// ReportRunMetricsV1 reports run metrics.
+// Supports v1beta1 API. Deprecated.
 func (s *RunServerV1) ReportRunMetricsV1(ctx context.Context, request *apiv1beta1.ReportRunMetricsRequest) (*apiv1beta1.ReportRunMetricsResponse, error) {
     if s.options.CollectMetrics {
         reportRunMetricsRequests.Inc()
@@ -434,7 +430,7 @@ func (s *RunServerV1) ReportRunMetricsV1(ctx context.Context, request *apiv1beta
     // Convert, validate, and report each metric in input order.
     var apiResults []*apiv1beta1.ReportRunMetricsResponse_ReportRunMetricResult
     for _, m := range request.GetMetrics() {
-        modelMetric, err := toModelRunMetric(m, request.GetRunId())
+        modelMetric, err := toModelRunMetricV1(m, request.GetRunId())
         if err != nil {
             // Conversion error: record as INVALID_ARGUMENT
             msg := err.Error()
@@ -447,7 +443,7 @@ func (s *RunServerV1) ReportRunMetricsV1(ctx context.Context, request *apiv1beta
             continue
         }
         // Report this metric
-        results, err := s.reportRunMetrics(ctx, []*model.RunMetric{modelMetric}, request.GetRunId())
+        results, err := s.reportRunMetricsV1(ctx, []*model.RunMetricV1{modelMetric}, request.GetRunId())
         if err != nil {
             return nil, util.Wrap(err, "Failed to report v1beta1 run metrics")
         }
@@ -560,7 +556,12 @@ func (s *RunServer) GetRun(ctx context.Context, request *apiv2beta1.GetRunReques
         getRunRequests.Inc()
     }
 
-    run, err := s.getRun(ctx, request.RunId)
+    // Determine whether to hydrate tasks based on the view parameter.
+    // The default (unspecified) view skips task hydration and returns only a task count;
+    // the FULL view hydrates tasks completely.
+    hydrateTasks := request.View != nil && *request.View == apiv2beta1.GetRunRequest_FULL
+
+    run, err := s.getRunWithHydration(ctx, request.RunId, hydrateTasks)
     if err != nil {
         return nil, util.Wrap(err, "Failed to get a run")
     }
@@ -568,6 +569,18 @@ func (s *RunServer) GetRun(ctx context.Context, request *apiv2beta1.GetRunReques
     return toApiRun(run), nil
 }
 
+func (s *BaseRunServer) getRunWithHydration(ctx context.Context, runID string, hydrateTasks bool) (*model.Run, error) {
+    err := s.canAccessRun(ctx, runID, &authorizationv1.ResourceAttributes{Verb: common.RbacResourceVerbGet})
+    if err != nil {
+        return nil, util.Wrap(err, "Failed to authorize the request")
+    }
+    run, err := s.resourceManager.GetRunWithHydration(runID, hydrateTasks)
+    if err != nil {
+        return nil, err
+    }
+    return run, nil
+}
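+
+// Illustrative example (mirroring run_server_tasks_test.go): a caller opts into
+// task hydration by requesting the FULL view:
+//   v := apiv2beta1.GetRunRequest_FULL
+//   run, err := s.GetRun(ctx, &apiv2beta1.GetRunRequest{RunId: id, View: &v})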
 
 // Fetches runs given query parameters.
 // Supports v2beta1 behavior.
 func (s *RunServer) ListRuns(ctx context.Context, r *apiv2beta1.ListRunsRequest) (*apiv2beta1.ListRunsResponse, error) {
@@ -578,7 +591,13 @@ func (s *RunServer) ListRuns(ctx context.Context, r *apiv2beta1.ListRunsRequest)
     if err != nil {
         return nil, util.Wrap(err, "Failed to create list options")
     }
-    runs, runsCount, nextPageToken, err := s.listRuns(ctx, r.GetPageToken(), int(r.GetPageSize()), r.GetSortBy(), opts, r.GetNamespace(), r.GetExperimentId())
+
+    // Determine whether to hydrate tasks based on the view parameter.
+    // The default (unspecified) view skips task hydration and returns only a task count;
+    // the FULL view hydrates tasks completely.
+    hydrateTasks := r.View != nil && *r.View == apiv2beta1.ListRunsRequest_FULL
+
+    runs, runsCount, nextPageToken, err := s.listRunsWithHydration(ctx, r.GetPageToken(), int(r.GetPageSize()), r.GetSortBy(), opts, r.GetNamespace(), r.GetExperimentId(), hydrateTasks)
     if err != nil {
         return nil, util.Wrap(err, "Failed to list runs")
     }
@@ -676,9 +695,267 @@ func (s *RunServer) RetryRun(ctx context.Context, request *apiv2beta1.RetryRunRe
     return &emptypb.Empty{}, nil
 }
 
-// Checks if a user can access a run.
-// Adds namespace of the parent experiment of a run id,
-// API group, version, and resource type.
+// CreateTask creates an API task.
+func (s *RunServer) CreateTask(ctx context.Context, request *apiv2beta1.CreateTaskRequest) (*apiv2beta1.PipelineTaskDetail, error) {
+    task := request.GetTask()
+    if task == nil {
+        return nil, util.NewInvalidInputError("Task is required")
+    }
+
+    // Check authorization - tasks inherit permissions from their parent run
+    err := s.canAccessRun(ctx, task.GetRunId(), &authorizationv1.ResourceAttributes{Verb: common.RbacResourceVerbUpdate})
+    if err != nil {
+        return nil, util.Wrap(err, "Failed to authorize task creation")
+    }
+
+    modelTask, err := toModelTask(task)
+    if err != nil {
+        return nil, util.Wrap(err, "Failed to convert task to model")
+    }
+    createdTask, err := s.resourceManager.CreateTask(modelTask)
+    if err != nil {
+        return nil, util.Wrap(err, "Failed to create task")
+    }
+
+    // A newly created task has no children
+    var noChildTasks []*model.Task
+
+    return toAPITask(createdTask, noChildTasks)
+}
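+
+// Illustrative example (mirroring run_server_tasks_test.go): a minimal request
+// needs only the parent run ID and a task name:
+//   task, err := s.CreateTask(ctx, &apiv2beta1.CreateTaskRequest{
+//       Task: &apiv2beta1.PipelineTaskDetail{RunId: runID, Name: "trainer"},
+//   })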
+
+// UpdateTask updates an existing task with the specified task ID and the details provided in the request.
+// It validates input, ensures authorization, and returns the updated task details, or an error if the update fails.
+func (s *RunServer) UpdateTask(ctx context.Context, request *apiv2beta1.UpdateTaskRequest) (*apiv2beta1.PipelineTaskDetail, error) {
+    taskID := request.GetTaskId()
+    task := request.GetTask()
+    if taskID == "" {
+        return nil, util.NewInvalidInputError("Task ID is required")
+    }
+    if task == nil {
+        return nil, util.NewInvalidInputError("Task is required")
+    }
+    // Ensure the task IDs match - prefer the path parameter for authorization
+    if task.GetTaskId() != "" && task.GetTaskId() != taskID {
+        return nil, util.NewInvalidInputError("Task ID in path parameter does not match task ID in request body")
+    }
+
+    // First get the existing task to find the run UUID for authorization
+    existingTask, err := s.resourceManager.GetTask(taskID)
+    if err != nil {
+        return nil, util.Wrap(err, "Failed to get existing task for authorization")
+    }
+
+    // Check authorization using the existing task's run UUID
+    err = s.canAccessRun(ctx, existingTask.RunUUID, &authorizationv1.ResourceAttributes{Verb: common.RbacResourceVerbUpdate})
+    if err != nil {
+        return nil, util.Wrap(err, "Failed to authorize task update")
+    }
+
+    modelTask, err := toModelTask(task)
+    if err != nil {
+        return nil, util.Wrap(err, "Failed to convert task to model")
+    }
+    modelTask.UUID = taskID // Always use the path parameter task ID
+    updatedTask, err := s.resourceManager.UpdateTask(modelTask)
+    if err != nil {
+        return nil, util.Wrap(err, "Failed to update task")
+    }
+
+    taskChildren, err := s.resourceManager.GetTaskChildren(updatedTask.UUID)
+    if err != nil {
+        return nil, util.Wrap(err, "Failed to get task children")
+    }
+    return toAPITask(updatedTask, taskChildren)
+}
+
+// UpdateTasksBulk updates multiple tasks in bulk.
+func (s *RunServer) UpdateTasksBulk(ctx context.Context, request *apiv2beta1.UpdateTasksBulkRequest) (*apiv2beta1.UpdateTasksBulkResponse, error) {
+    if request == nil || len(request.GetTasks()) == 0 {
+        return nil, util.NewInvalidInputError("UpdateTasksBulkRequest must contain at least one task")
+    }
+
+    response := &apiv2beta1.UpdateTasksBulkResponse{
+        Tasks: make(map[string]*apiv2beta1.PipelineTaskDetail),
+    }
+
+    // Validate and update each task
+    for taskID, task := range request.GetTasks() {
+        if taskID == "" {
+            return nil, util.NewInvalidInputError("Task ID is required")
+        }
+        if task == nil {
+            return nil, util.NewInvalidInputError("Task is required for task ID %s", taskID)
+        }
+
+        // Ensure the task IDs match - prefer the map key for authorization
+        if task.GetTaskId() != "" && task.GetTaskId() != taskID {
+            return nil, util.NewInvalidInputError("Task ID in map key does not match task ID in task detail for task %s", taskID)
+        }
+
+        // First get the existing task to find the run UUID for authorization
+        existingTask, err := s.resourceManager.GetTask(taskID)
+        if err != nil {
+            return nil, util.Wrapf(err, "Failed to get existing task %s for authorization", taskID)
+        }
+
+        // Check authorization using the existing task's run UUID
+        err = s.canAccessRun(ctx, existingTask.RunUUID, &authorizationv1.ResourceAttributes{Verb: common.RbacResourceVerbUpdate})
+        if err != nil {
+            return nil, util.Wrapf(err, "Failed to authorize task update for task %s", taskID)
+        }
+
+        modelTask, err := toModelTask(task)
+        if err != nil {
+            return nil, util.Wrapf(err, "Failed to convert task to model for task %s", taskID)
+        }
+        modelTask.UUID = taskID // Always use the map key task ID
+
+        updatedTask, err := s.resourceManager.UpdateTask(modelTask)
+        if err != nil {
+            return nil, util.Wrapf(err, "Failed to update task %s", taskID)
+        }
+
+        taskChildren, err := s.resourceManager.GetTaskChildren(updatedTask.UUID)
+        if err != nil {
+            return nil, util.Wrapf(err, "Failed to get task children for task %s", taskID)
+        }
+
+        apiTask, err := toAPITask(updatedTask, taskChildren)
+        if err != nil {
+            return nil, util.Wrapf(err, "Failed to convert task to API for task %s", taskID)
+        }
+        response.Tasks[taskID] = apiTask
+    }
+
+    return response, nil
+}
+
+// GetTask retrieves the details of a specific task based on its ID and performs authorization checks.
+func (s *RunServer) GetTask(ctx context.Context, request *apiv2beta1.GetTaskRequest) (*apiv2beta1.PipelineTaskDetail, error) {
+    taskID := request.GetTaskId()
+    if taskID == "" {
+        return nil, util.NewInvalidInputError("Task ID is required")
+    }
+
+    task, err := s.resourceManager.GetTask(taskID)
+    if err != nil {
+        return nil, util.Wrap(err, "Failed to get task")
+    }
+
+    // Check authorization using the task's run UUID
+    err = s.canAccessRun(ctx, task.RunUUID, &authorizationv1.ResourceAttributes{Verb: common.RbacResourceVerbGet})
+    if err != nil {
+        return nil, util.Wrap(err, "Failed to authorize task access")
+    }
+
+    childTasks, err := s.resourceManager.GetTaskChildren(task.UUID)
+    if err != nil {
+        return nil, util.Wrap(err, "Failed to get task children")
+    }
+    return toAPITask(task, childTasks)
+}
+
+// ListTasks retrieves a list of tasks based on a specified run ID, parent task ID, or namespace; the three filters are mutually exclusive.
+// It validates authorization, processes pagination options, and ensures namespace consistency within the data.
+func (s *RunServer) ListTasks(ctx context.Context, request *apiv2beta1.ListTasksRequest) (*apiv2beta1.ListTasksResponse, error) {
+    // Check which filter is set using the oneof field.
+    // This allows an empty namespace in single-user mode.
+    var runID, parentID, namespace string
+    var filterType string
+
+    switch filter := request.ParentFilter.(type) {
+    case *apiv2beta1.ListTasksRequest_RunId:
+        runID = filter.RunId
+        filterType = "run_id"
+    case *apiv2beta1.ListTasksRequest_ParentId:
+        parentID = filter.ParentId
+        filterType = "parent_id"
+    case *apiv2beta1.ListTasksRequest_Namespace:
+        namespace = filter.Namespace
+        filterType = "namespace"
+    default:
+        // One of these fields is required to enforce RBAC on this request in multi-user mode.
+        // For run IDs, we use the associated run's namespace to enforce RBAC;
+        // in the namespace case, we use the namespace itself.
+        if common.IsMultiUserMode() {
+            return nil, util.NewInvalidInputError("Either run_id, parent_id, or namespace is required in multi-user mode")
+        }
+        // In single-user mode, allow the namespace to be empty.
+        filterType = "namespace"
+    }
+
+    // Check authorization and derive the expected namespace based on the filter type
+    switch filterType {
+    case "run_id":
+        err := s.canAccessRun(ctx, runID, &authorizationv1.ResourceAttributes{Verb: common.RbacResourceVerbList})
+        if err != nil {
+            return nil, util.Wrap(err, "Failed to authorize task listing")
+        }
+    case "parent_id":
+        // parent_id is provided, so get the parent task to find the run ID and namespace
+        parentTask, err := s.resourceManager.GetTask(parentID)
+        if err != nil {
+            return nil, util.Wrap(err, "Failed to get parent task for authorization")
+        }
+        err = s.canAccessRun(ctx, parentTask.RunUUID, &authorizationv1.ResourceAttributes{Verb: common.RbacResourceVerbList})
+        if err != nil {
+            return nil, util.Wrap(err, "Failed to authorize task listing")
+        }
+    case "namespace":
+        // For namespace filtering, check whether the user has get permission on runs in this namespace
+        if common.IsMultiUserMode() {
+            if namespace == "" {
+                return nil, util.NewInvalidInputError("Namespace is required in multi-user mode")
+            }
+            resourceAttributes := &authorizationv1.ResourceAttributes{
+                Namespace: namespace,
+                Verb:      common.RbacResourceVerbGet,
+                Group:     common.RbacPipelinesGroup,
+                Version:   common.RbacPipelinesVersion,
+                Resource:  common.RbacResourceTypeRuns,
+            }
+            err := s.resourceManager.IsAuthorized(ctx, resourceAttributes)
+            if err != nil {
+                return nil, util.Wrapf(err, "Failed to authorize task listing by namespace. Check if you have access to runs in namespace %s", namespace)
+            }
+        } else {
+            // Namespaces are not set in single-user mode, so ignore any namespace the
+            // user specified and list everything.
+            namespace = ""
+        }
+    }
+
+    opts, err := validatedListOptions(&model.Task{}, request.GetPageToken(), int(request.GetPageSize()), request.GetOrderBy(), request.GetFilter(), "v2beta1")
+    if err != nil {
+        return nil, util.Wrap(err, "Failed to create list options")
+    }
+
+    // List tasks scoped by run ID, parent task ID, or namespace
+    tasks, totalSize, nextPageToken, err := s.resourceManager.ListTasks(runID, parentID, namespace, opts)
+    if err != nil {
+        return nil, util.Wrap(err, "Failed to list tasks")
+    }
+
+    apiTasks := make([]*apiv2beta1.PipelineTaskDetail, len(tasks))
+    for i, task := range tasks {
+        taskChildren, err := s.resourceManager.GetTaskChildren(task.UUID)
+        if err != nil {
+            return nil, util.Wrap(err, "Failed to get task children")
+        }
+        apiTasks[i], err = toAPITask(task, taskChildren)
+        if err != nil {
+            return nil, util.Wrap(err, "Failed to convert task to API")
+        }
+    }
+
+    return &apiv2beta1.ListTasksResponse{
+        Tasks:         apiTasks,
+        NextPageToken: nextPageToken,
+        TotalSize:     int32(totalSize),
+    }, nil
+}
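+
+// Illustrative example (mirroring run_server_tasks_test.go): listing the tasks
+// of a run uses the run_id arm of the oneof filter:
+//   resp, err := s.ListTasks(ctx, &apiv2beta1.ListTasksRequest{
+//       ParentFilter: &apiv2beta1.ListTasksRequest_RunId{RunId: runID},
+//   })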
+
+// canAccessRun verifies that the current user has access to the specified run, using the provided resource attributes.
 func (s *BaseRunServer) canAccessRun(ctx context.Context, runId string, resourceAttributes *authorizationv1.ResourceAttributes) error {
     if !common.IsMultiUserMode() {
         // Skip authz if not multi-user mode.
diff --git a/backend/src/apiserver/server/run_server_tasks_test.go b/backend/src/apiserver/server/run_server_tasks_test.go
new file mode 100644
index 00000000000..65d123ab9b1
--- /dev/null
+++ b/backend/src/apiserver/server/run_server_tasks_test.go
@@ -0,0 +1,689 @@
+// Copyright 2025 The Kubeflow Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package server
+
+import (
+    "context"
+    "sort"
+    "testing"
+
+    apiv2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client"
+    "github.com/kubeflow/pipelines/backend/src/apiserver/common"
+    "github.com/kubeflow/pipelines/backend/src/apiserver/resource"
+    "github.com/spf13/viper"
+    "github.com/stretchr/testify/assert"
+    "google.golang.org/protobuf/types/known/structpb"
+)
+
+// seedOneRun creates a simple run via the resource manager and returns its ID.
+func seedOneRun(t *testing.T) (*resource.FakeClientManager, *resource.ResourceManager, string) {
+    clients, manager, run := initWithOneTimeRunV2(t)
+    return clients, manager, run.UUID
+}
+
+func TestTask_Create_Update_Get_List(t *testing.T) {
+    // Single-user mode by default; keep it to bypass authz.
+    clients, manager, runID := seedOneRun(t)
+    defer clients.Close()
+
+    runSrv := createRunServer(manager)
+
+    // Create a task with inputs and outputs
+    v1, err := structpb.NewValue("v1")
+    assert.NoError(t, err)
+    v2, err := structpb.NewValue("3.14")
+    assert.NoError(t, err)
+    inParams := []*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{
+        {
+            Value:        v1,
+            ParameterKey: "p1",
+        },
+    }
+    outParams := []*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{
+        {
+            Value:        v2,
+            ParameterKey: "op1",
+        },
+    }
+    createReq := &apiv2beta1.CreateTaskRequest{Task: &apiv2beta1.PipelineTaskDetail{
+        RunId:   runID,
+        Name:    "trainer",
+        State:   apiv2beta1.PipelineTaskDetail_RUNNING,
+        Type:    apiv2beta1.PipelineTaskDetail_RUNTIME,
+        Inputs:  &apiv2beta1.PipelineTaskDetail_InputOutputs{Parameters: inParams},
+        Outputs: &apiv2beta1.PipelineTaskDetail_InputOutputs{Parameters: outParams},
+    }}
+    created, err := runSrv.CreateTask(context.Background(), createReq)
+    assert.NoError(t, err)
+    assert.NotEmpty(t, created.GetTaskId())
+    assert.Equal(t, runID, created.GetRunId())
+    assert.Equal(t, "trainer", created.GetName())
+    // Verify the inputs/outputs are echoed back
+    assert.Len(t, created.GetInputs().GetParameters(), 1)
+    assert.Equal(t, "p1", created.GetInputs().GetParameters()[0].GetParameterKey())
+    assert.Len(t, created.GetOutputs().GetParameters(), 1)
+    assert.Equal(t, "op1", created.GetOutputs().GetParameters()[0].GetParameterKey())
+
+    // Update the task: change its state and outputs
+    updReq := &apiv2beta1.UpdateTaskRequest{TaskId: created.GetTaskId(), Task: &apiv2beta1.PipelineTaskDetail{
+        TaskId: created.GetTaskId(),
+        RunId:  runID,
+        Name:   "trainer",
+        State:  apiv2beta1.PipelineTaskDetail_SUCCEEDED,
+        Outputs: &apiv2beta1.PipelineTaskDetail_InputOutputs{Parameters: []*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{
+            {
+                Value:        func() *structpb.Value { v, _ := structpb.NewValue("done"); return v }(),
+                ParameterKey: "op1",
+            },
+        }},
+    }}
+    updated, err := runSrv.UpdateTask(context.Background(), updReq)
+    assert.NoError(t, err)
+    assert.Equal(t, apiv2beta1.PipelineTaskDetail_SUCCEEDED, updated.GetState())
+
+    // Parameter values are merged, not overridden
+    params := updated.GetOutputs().GetParameters()
+    sortParams(params)
+    assert.Equal(t, "3.14", params[0].GetValue().AsInterface())
+    assert.Equal(t, "done", params[1].GetValue().AsInterface())
+
+    // GetTask
+    got, err := runSrv.GetTask(context.Background(), &apiv2beta1.GetTaskRequest{TaskId: created.GetTaskId()})
+    assert.NoError(t, err)
+    assert.Equal(t, created.GetTaskId(), got.GetTaskId())
+    assert.Equal(t, apiv2beta1.PipelineTaskDetail_SUCCEEDED, got.GetState())
+
+    // ListTasks by run ID
+    listResp, err := runSrv.ListTasks(context.Background(), &apiv2beta1.ListTasksRequest{ParentFilter: &apiv2beta1.ListTasksRequest_RunId{RunId: runID}, PageSize: 50})
+    assert.NoError(t, err)
+    assert.GreaterOrEqual(t, int(listResp.GetTotalSize()), 1)
+    found := false
+    for _, tt := range listResp.GetTasks() {
+        if tt.GetTaskId() == created.GetTaskId() {
+            found = true
+            break
+        }
+    }
+    assert.True(t, found)
+}
+
+func TestTask_RunHydration_WithInputsOutputs_ArtifactsAndMetrics(t *testing.T) {
+    // Multi-user mode on to exercise the auth paths; use the helper ctx for headers.
+    viper.Set(common.MultiUserMode, "true")
+    t.Cleanup(func() { viper.Set(common.MultiUserMode, "false") })
+
+    clients, manager, run := initWithOneTimeRun(t)
+    defer clients.Close()
+
+    runSrv := createRunServer(manager)
+    artSrv := createArtifactServer(manager)
+
+    // Create a task with IO
+    create := &apiv2beta1.CreateTaskRequest{
+        Task: &apiv2beta1.PipelineTaskDetail{
+            RunId: run.UUID,
+            Name:  "preprocess",
+            State: apiv2beta1.PipelineTaskDetail_RUNNING,
+            Inputs: &apiv2beta1.PipelineTaskDetail_InputOutputs{Parameters: []*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{
+                {
+                    Value:        func() *structpb.Value { v, _ := structpb.NewValue("0.5"); return v }(),
+                    ParameterKey: "threshold",
+                },
+            }},
+            Outputs: &apiv2beta1.PipelineTaskDetail_InputOutputs{Parameters: []*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{
+                {
+                    Value:        func() *structpb.Value { v, _ := structpb.NewValue("100"); return v }(),
+                    ParameterKey: "rows",
+                },
+            }},
+        }}
+    created, err := runSrv.CreateTask(ctxWithUser(), create)
+    assert.NoError(t, err)
+
+    // Create an artifact and link it as an output of the task
+    _, err = artSrv.CreateArtifact(ctxWithUser(),
+        &apiv2beta1.CreateArtifactRequest{
+            RunId:       run.UUID,
+            TaskId:      created.GetTaskId(),
+            ProducerKey: "some-parent-task-output",
+            Type:        apiv2beta1.IOType_TASK_OUTPUT_INPUT,
+            Artifact: &apiv2beta1.Artifact{
+                Namespace: run.Namespace,
+                Type:      apiv2beta1.Artifact_Model,
+                Uri:       strPTR("gs://bucket/model"),
+                Name:      "m1",
+            }})
+    assert.NoError(t, err)
+
+    // Confirm a link was created between the task and the artifact
+    artifactTasks, err := artSrv.ListArtifactTasks(ctxWithUser(), &apiv2beta1.ListArtifactTasksRequest{
+        TaskIds:  []string{created.GetTaskId()},
+        RunIds:   []string{run.UUID},
+        PageSize: 10,
+    })
+    assert.NoError(t, err)
+    assert.Equal(t, int32(1), artifactTasks.GetTotalSize())
+    assert.Equal(t, 1, len(artifactTasks.GetArtifactTasks()))
+
+    // Update the task outputs to include an artifact reference in OutputArtifacts
+    _, err = runSrv.UpdateTask(ctxWithUser(),
+        &apiv2beta1.UpdateTaskRequest{
+            TaskId: created.GetTaskId(),
+            Task: &apiv2beta1.PipelineTaskDetail{
+                TaskId:  created.GetTaskId(),
+                RunId:   run.UUID,
+                State:   apiv2beta1.PipelineTaskDetail_SUCCEEDED,
+                Outputs: &apiv2beta1.PipelineTaskDetail_InputOutputs{},
+            }})
+    assert.NoError(t, err)
+
+    // Now fetch the run and ensure its tasks are hydrated with inputs/outputs
+    fullView := apiv2beta1.GetRunRequest_FULL
+    gr, err := runSrv.GetRun(ctxWithUser(), &apiv2beta1.GetRunRequest{RunId: run.UUID, View: &fullView})
+    assert.NoError(t, err)
+    assert.NotNil(t, gr)
+    assert.GreaterOrEqual(t, len(gr.GetTasks()), 1)
+    var taskFound *apiv2beta1.PipelineTaskDetail
+    for _, tt := range gr.GetTasks() {
+        if tt.GetTaskId() == created.GetTaskId() {
+            taskFound = tt
+            break
+        }
+    }
+    if assert.NotNil(t, taskFound, "created task not present in hydrated run") {
+        // Parameters present
+        assert.Equal(t, 1, len(taskFound.GetInputs().GetParameters()))
+        if assert.NotNil(t, taskFound.GetInputs().GetParameters(), "parameters not present in hydrated task") {
+            assert.Equal(t, "threshold", taskFound.GetInputs().GetParameters()[0].GetParameterKey())
+            // Outputs updated and the artifact reference present
+            assert.Equal(t, apiv2beta1.PipelineTaskDetail_SUCCEEDED, taskFound.GetState())
+        }
+        assert.Equal(t, 1, len(taskFound.GetOutputs().GetArtifacts()))
+        if assert.NotNil(t, taskFound.GetOutputs().GetArtifacts(), "artifacts not present in hydrated task") {
+            ioArtifact := taskFound.GetOutputs().GetArtifacts()[0]
+            assert.Equal(t, "some-parent-task-output", ioArtifact.GetArtifactKey())
+            if assert.NotNil(t, ioArtifact.GetProducer()) {
+                assert.Equal(t, taskFound.Name, ioArtifact.GetProducer().GetTaskName())
+            }
+            if len(ioArtifact.GetArtifacts()) > 0 {
+                artifact := ioArtifact.GetArtifacts()[0]
+                assert.Equal(t, "gs://bucket/model", *artifact.Uri)
+                assert.Equal(t, "m1", artifact.Name)
+            }
+        }
+    }
+}
+
+func TestListTasks_ByParent(t *testing.T) {
+    cm, rm, runID := seedOneRun(t)
+    defer cm.Close()
+    server := createRunServer(rm)
+
+    // Create the parent task
+    parent, err := server.CreateTask(context.Background(),
+        &apiv2beta1.CreateTaskRequest{
+            Task: &apiv2beta1.PipelineTaskDetail{
+                RunId: runID,
+                Name:  "parent",
+            },
+        },
+    )
+    assert.NoError(t, err)
+
+    // Create a child task with ParentTaskId set
+    child, err := server.CreateTask(context.Background(),
+        &apiv2beta1.CreateTaskRequest{
+            Task: &apiv2beta1.PipelineTaskDetail{
+                RunId:        runID,
+                Name:         "child",
+                ParentTaskId: strPTR(parent.GetTaskId()),
+            },
+        },
+    )
+    assert.NoError(t, err)
+    assert.NotEmpty(t, child.GetTaskId())
+
+    // List by parent ID
+    resp, err := server.ListTasks(context.Background(),
+        &apiv2beta1.ListTasksRequest{
+            ParentFilter: &apiv2beta1.ListTasksRequest_ParentId{
+                ParentId: parent.GetTaskId(),
+            },
+            PageSize: 50,
+        })
+    assert.NoError(t, err)
+    assert.Equal(t, int32(1), resp.GetTotalSize())
+    assert.Equal(t, 1, len(resp.GetTasks()))
+    assert.Equal(t, child.GetTaskId(), resp.GetTasks()[0].GetTaskId())
+}
+
+func TestUpdateTasksBulk_Success(t *testing.T) {
+    // Single-user mode to bypass authz
+    clients, manager, runID := seedOneRun(t)
+    defer clients.Close()
+
+    runSrv := createRunServer(manager)
+
+    // Create three tasks
+    v1, _ := structpb.NewValue("initial1")
+    v2, _ := structpb.NewValue("initial2")
+    v3, _ := structpb.NewValue("initial3")
+
+    task1, err := runSrv.CreateTask(context.Background(), &apiv2beta1.CreateTaskRequest{
+        Task: &apiv2beta1.PipelineTaskDetail{
+            RunId: runID,
+            Name:  "task1",
+            State: apiv2beta1.PipelineTaskDetail_RUNNING,
+            Outputs: &apiv2beta1.PipelineTaskDetail_InputOutputs{
+                Parameters: []*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{
+                    {Value: v1, ParameterKey: "out1"},
+                },
+            },
+        },
+    })
+    assert.NoError(t, err)
+
+    task2, err := runSrv.CreateTask(context.Background(), &apiv2beta1.CreateTaskRequest{
+        Task: &apiv2beta1.PipelineTaskDetail{
+            RunId: runID,
+            Name:  "task2",
+            State: apiv2beta1.PipelineTaskDetail_RUNNING,
+            Outputs: &apiv2beta1.PipelineTaskDetail_InputOutputs{
+                Parameters: []*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{
+                    {Value: v2, ParameterKey: "out2"},
+                },
+            },
+        },
+    })
+    assert.NoError(t, err)
+ }, + }, + }, + }) + assert.NoError(t, err) + + task3, err := runSrv.CreateTask(context.Background(), &apiv2beta1.CreateTaskRequest{ + Task: &apiv2beta1.PipelineTaskDetail{ + RunId: runID, + Name: "task3", + State: apiv2beta1.PipelineTaskDetail_RUNNING, + Outputs: &apiv2beta1.PipelineTaskDetail_InputOutputs{ + Parameters: []*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{ + {Value: v3, ParameterKey: "out3"}, + }, + }, + }, + }) + assert.NoError(t, err) + + // Update all three tasks in bulk + updatedV1, _ := structpb.NewValue("updated1") + updatedV2, _ := structpb.NewValue("updated2") + updatedV3, _ := structpb.NewValue("updated3") + + bulkReq := &apiv2beta1.UpdateTasksBulkRequest{ + Tasks: map[string]*apiv2beta1.PipelineTaskDetail{ + task1.GetTaskId(): { + TaskId: task1.GetTaskId(), + RunId: runID, + Name: "task1", + State: apiv2beta1.PipelineTaskDetail_SUCCEEDED, + Outputs: &apiv2beta1.PipelineTaskDetail_InputOutputs{ + Parameters: []*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{ + {Value: updatedV1, ParameterKey: "out1"}, + }, + }, + }, + task2.GetTaskId(): { + TaskId: task2.GetTaskId(), + RunId: runID, + Name: "task2", + State: apiv2beta1.PipelineTaskDetail_FAILED, + Outputs: &apiv2beta1.PipelineTaskDetail_InputOutputs{ + Parameters: []*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{ + {Value: updatedV2, ParameterKey: "out2"}, + }, + }, + }, + task3.GetTaskId(): { + TaskId: task3.GetTaskId(), + RunId: runID, + Name: "task3", + State: apiv2beta1.PipelineTaskDetail_SKIPPED, + Outputs: &apiv2beta1.PipelineTaskDetail_InputOutputs{ + Parameters: []*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{ + {Value: updatedV3, ParameterKey: "out3"}, + }, + }, + }, + }, + } + + resp, err := runSrv.UpdateTasksBulk(context.Background(), bulkReq) + assert.NoError(t, err) + assert.NotNil(t, resp) + assert.Equal(t, 3, len(resp.GetTasks())) + + // Verify each task was updated correctly + updatedTask1 := resp.GetTasks()[task1.GetTaskId()] + assert.NotNil(t, updatedTask1) + assert.Equal(t, apiv2beta1.PipelineTaskDetail_SUCCEEDED, updatedTask1.GetState()) + params := updatedTask1.GetOutputs().GetParameters() + sortParams(params) + assert.Equal(t, "initial1", params[0].GetValue().AsInterface()) + assert.Equal(t, "updated1", params[1].GetValue().AsInterface()) + + updatedTask2 := resp.GetTasks()[task2.GetTaskId()] + assert.NotNil(t, updatedTask2) + assert.Equal(t, apiv2beta1.PipelineTaskDetail_FAILED, updatedTask2.GetState()) + params = updatedTask2.GetOutputs().GetParameters() + sortParams(params) + assert.Equal(t, "initial2", params[0].GetValue().AsInterface()) + assert.Equal(t, "updated2", params[1].GetValue().AsInterface()) + + updatedTask3 := resp.GetTasks()[task3.GetTaskId()] + assert.NotNil(t, updatedTask3) + assert.Equal(t, apiv2beta1.PipelineTaskDetail_SKIPPED, updatedTask3.GetState()) + params = updatedTask3.GetOutputs().GetParameters() + sortParams(params) + assert.Equal(t, "initial3", params[0].GetValue().AsInterface()) + assert.Equal(t, "updated3", params[1].GetValue().AsInterface()) + + // Verify updates persisted by fetching individually + fetched1, err := runSrv.GetTask(context.Background(), &apiv2beta1.GetTaskRequest{TaskId: task1.GetTaskId()}) + assert.NoError(t, err) + assert.Equal(t, apiv2beta1.PipelineTaskDetail_SUCCEEDED, fetched1.GetState()) + + fetched2, err := runSrv.GetTask(context.Background(), &apiv2beta1.GetTaskRequest{TaskId: task2.GetTaskId()}) + assert.NoError(t, err) + assert.Equal(t, apiv2beta1.PipelineTaskDetail_FAILED, fetched2.GetState()) + + 
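// A single bulk request moved the three tasks to three different terminal + // states; the SKIPPED read-back below completes the persistence check. + 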
fetched3, err := runSrv.GetTask(context.Background(), &apiv2beta1.GetTaskRequest{TaskId: task3.GetTaskId()}) + assert.NoError(t, err) + assert.Equal(t, apiv2beta1.PipelineTaskDetail_SKIPPED, fetched3.GetState()) +} + +// sortParams sorts the parameters in place by string value (returning the slice for convenience); in these tests "initial" values sort before "updated" values since 'i' < 'u'. +func sortParams(params []*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter) []*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter { + sort.Slice(params, func(i, j int) bool { + return params[i].GetValue().GetStringValue() < params[j].GetValue().GetStringValue() + }) + return params +} + +func TestUpdateTasksBulk_EmptyRequest(t *testing.T) { + clients, manager, _ := seedOneRun(t) + defer clients.Close() + + runSrv := createRunServer(manager) + + // Test with nil request + _, err := runSrv.UpdateTasksBulk(context.Background(), nil) + assert.Error(t, err) + assert.Contains(t, err.Error(), "must contain at least one task") + + // Test with empty tasks map + _, err = runSrv.UpdateTasksBulk(context.Background(), &apiv2beta1.UpdateTasksBulkRequest{ + Tasks: map[string]*apiv2beta1.PipelineTaskDetail{}, + }) + assert.Error(t, err) + assert.Contains(t, err.Error(), "must contain at least one task") +} + +func TestUpdateTasksBulk_ValidationErrors(t *testing.T) { + clients, manager, runID := seedOneRun(t) + defer clients.Close() + + runSrv := createRunServer(manager) + + // Create a task first + task, err := runSrv.CreateTask(context.Background(), &apiv2beta1.CreateTaskRequest{ + Task: &apiv2beta1.PipelineTaskDetail{ + RunId: runID, + Name: "test-task", + State: apiv2beta1.PipelineTaskDetail_RUNNING, + }, + }) + assert.NoError(t, err) + + // Test with mismatched task IDs + _, err = runSrv.UpdateTasksBulk(context.Background(), &apiv2beta1.UpdateTasksBulkRequest{ + Tasks: map[string]*apiv2beta1.PipelineTaskDetail{ + task.GetTaskId(): { + TaskId: "different-id", // Mismatch! + RunId: runID, + State: apiv2beta1.PipelineTaskDetail_SUCCEEDED, + }, + }, + }) + assert.Error(t, err) + assert.Contains(t, err.Error(), "does not match") + + // Test with artifact updates + tasksResp, err := runSrv.UpdateTasksBulk(context.Background(), &apiv2beta1.UpdateTasksBulkRequest{ + Tasks: map[string]*apiv2beta1.PipelineTaskDetail{ + task.GetTaskId(): { + TaskId: task.GetTaskId(), + RunId: runID, + State: apiv2beta1.PipelineTaskDetail_SUCCEEDED, + Outputs: &apiv2beta1.PipelineTaskDetail_InputOutputs{ + Artifacts: []*apiv2beta1.PipelineTaskDetail_InputOutputs_IOArtifact{ + {ArtifactKey: "should-fail"}, + }, + }, + }, + }, + }) + assert.NoError(t, err) + // Artifact updates should be ignored; artifacts must be created via the Artifacts API.
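+ // (The call above still succeeds; the server silently drops the artifact + // payload. Attaching artifacts goes through the Artifacts API instead, for + // example something like artifactSrv.CreateArtifact(ctx, req); the service + // and request names in this sketch are assumed, not part of this change.)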
+ assert.Empty(t, tasksResp.GetTasks()[task.GetTaskId()].GetOutputs().GetArtifacts()) + + // Test with non-existent task + _, err = runSrv.UpdateTasksBulk(context.Background(), &apiv2beta1.UpdateTasksBulkRequest{ + Tasks: map[string]*apiv2beta1.PipelineTaskDetail{ + "non-existent-task-id": { + TaskId: "non-existent-task-id", + RunId: runID, + State: apiv2beta1.PipelineTaskDetail_SUCCEEDED, + }, + }, + }) + assert.Error(t, err) + assert.Contains(t, err.Error(), "Failed to get existing task") +} + +func TestListTasks_ByNamespace(t *testing.T) { + // Enable multi-user mode to ensure runs have namespaces + viper.Set(common.MultiUserMode, "true") + t.Cleanup(func() { viper.Set(common.MultiUserMode, "false") }) + + // Use initWithOneTimeRunV2 which creates a run with unique UUIDs + clients, manager, run := initWithOneTimeRunV2(t) + defer clients.Close() + + runSrv := createRunServer(manager) + runID := run.UUID + originalNamespace := run.Namespace + + // Verify namespace is set + assert.NotEmpty(t, originalNamespace, "Run namespace should not be empty in multi-user mode") + + // Create tasks in the run's namespace (use ctxWithUser for multi-user mode) + task1, err := runSrv.CreateTask(ctxWithUser(), &apiv2beta1.CreateTaskRequest{ + Task: &apiv2beta1.PipelineTaskDetail{ + RunId: runID, + Name: "task-1", + State: apiv2beta1.PipelineTaskDetail_RUNNING, + }, + }) + assert.NoError(t, err) + + task2, err := runSrv.CreateTask(ctxWithUser(), &apiv2beta1.CreateTaskRequest{ + Task: &apiv2beta1.PipelineTaskDetail{ + RunId: runID, + Name: "task-2", + State: apiv2beta1.PipelineTaskDetail_SUCCEEDED, + }, + }) + assert.NoError(t, err) + + // List tasks by the run's namespace - should return both tasks + resp1, err := runSrv.ListTasks(ctxWithUser(), &apiv2beta1.ListTasksRequest{ + ParentFilter: &apiv2beta1.ListTasksRequest_Namespace{ + Namespace: originalNamespace, + }, + PageSize: 50, + }) + assert.NoError(t, err) + assert.Equal(t, int32(2), resp1.GetTotalSize()) + assert.Equal(t, 2, len(resp1.GetTasks())) + + // Verify the returned tasks are from the run's namespace + taskIDs := map[string]bool{ + task1.GetTaskId(): false, + task2.GetTaskId(): false, + } + for _, task := range resp1.GetTasks() { + if _, exists := taskIDs[task.GetTaskId()]; exists { + taskIDs[task.GetTaskId()] = true + } + } + for taskID, found := range taskIDs { + assert.True(t, found, "Task %s from namespace not found in response", taskID) + } + + // List tasks by a non-existent namespace - should return 0 tasks + resp2, err := runSrv.ListTasks(ctxWithUser(), &apiv2beta1.ListTasksRequest{ + ParentFilter: &apiv2beta1.ListTasksRequest_Namespace{ + Namespace: "non-existent-namespace", + }, + PageSize: 50, + }) + assert.NoError(t, err) + assert.Equal(t, int32(0), resp2.GetTotalSize()) + assert.Equal(t, 0, len(resp2.GetTasks())) +} + +func TestListTasks_MutualExclusivity(t *testing.T) { + // Test in single-user mode first + viper.Set(common.MultiUserMode, "false") + t.Cleanup(func() { viper.Set(common.MultiUserMode, "false") }) + + clients, manager, runID := seedOneRun(t) + defer clients.Close() + + runSrv := createRunServer(manager) + + // Create a parent task + parent, err := runSrv.CreateTask(context.Background(), &apiv2beta1.CreateTaskRequest{ + Task: &apiv2beta1.PipelineTaskDetail{ + RunId: runID, + Name: "parent", + }, + }) + assert.NoError(t, err) + + // Test: No filter provided - should succeed in single-user mode (lists all tasks) + _, err = runSrv.ListTasks(context.Background(), &apiv2beta1.ListTasksRequest{ + PageSize: 50, + }) + 
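// (Single-user mode has no namespace boundary to enforce, so an unfiltered + // list is allowed and simply returns every task.) + 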
assert.NoError(t, err) + + // Test: Providing run_id succeeds + _, err = runSrv.ListTasks(context.Background(), &apiv2beta1.ListTasksRequest{ + ParentFilter: &apiv2beta1.ListTasksRequest_RunId{ + RunId: runID, + }, + PageSize: 50, + }) + assert.NoError(t, err) + + // Test: Providing parent_id succeeds + _, err = runSrv.ListTasks(context.Background(), &apiv2beta1.ListTasksRequest{ + ParentFilter: &apiv2beta1.ListTasksRequest_ParentId{ + ParentId: parent.GetTaskId(), + }, + PageSize: 50, + }) + assert.NoError(t, err) + + // Test: Providing namespace succeeds + _, err = runSrv.ListTasks(context.Background(), &apiv2beta1.ListTasksRequest{ + ParentFilter: &apiv2beta1.ListTasksRequest_Namespace{ + Namespace: "some-namespace", + }, + PageSize: 50, + }) + assert.NoError(t, err) + + // Now test multi-user mode + viper.Set(common.MultiUserMode, "true") + + // Test: No filter provided - should fail in multi-user mode + _, err = runSrv.ListTasks(ctxWithUser(), &apiv2beta1.ListTasksRequest{ + PageSize: 50, + }) + assert.Error(t, err) + assert.Contains(t, err.Error(), "Either run_id, parent_id, or namespace is required") +} + +func TestListTasks_EmptyNamespaceSingleUserMode(t *testing.T) { + // Ensure we're in single-user mode + viper.Set(common.MultiUserMode, "false") + t.Cleanup(func() { viper.Set(common.MultiUserMode, "false") }) + + clients, manager, runID := seedOneRun(t) + defer clients.Close() + + runSrv := createRunServer(manager) + + // Create some tasks + task1, err := runSrv.CreateTask(context.Background(), &apiv2beta1.CreateTaskRequest{ + Task: &apiv2beta1.PipelineTaskDetail{ + RunId: runID, + Name: "task-1", + State: apiv2beta1.PipelineTaskDetail_RUNNING, + }, + }) + assert.NoError(t, err) + + task2, err := runSrv.CreateTask(context.Background(), &apiv2beta1.CreateTaskRequest{ + Task: &apiv2beta1.PipelineTaskDetail{ + RunId: runID, + Name: "task-2", + State: apiv2beta1.PipelineTaskDetail_SUCCEEDED, + }, + }) + assert.NoError(t, err) + + // Test: In single-user mode, empty namespace should list all tasks + resp, err := runSrv.ListTasks(context.Background(), &apiv2beta1.ListTasksRequest{ + ParentFilter: &apiv2beta1.ListTasksRequest_Namespace{ + Namespace: "", // Empty namespace in single-user mode + }, + PageSize: 50, + }) + assert.NoError(t, err) + assert.GreaterOrEqual(t, int(resp.GetTotalSize()), 2, "Should return at least the 2 tasks we created") + + // Verify our tasks are in the response + taskIDs := map[string]bool{ + task1.GetTaskId(): false, + task2.GetTaskId(): false, + } + for _, task := range resp.GetTasks() { + if _, exists := taskIDs[task.GetTaskId()]; exists { + taskIDs[task.GetTaskId()] = true + } + } + for taskID, found := range taskIDs { + assert.True(t, found, "Task %s should be found when filtering by empty namespace in single-user mode", taskID) + } +} diff --git a/backend/src/apiserver/server/run_server_view_test.go b/backend/src/apiserver/server/run_server_view_test.go new file mode 100644 index 00000000000..d1a343935b7 --- /dev/null +++ b/backend/src/apiserver/server/run_server_view_test.go @@ -0,0 +1,369 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package server + +import ( + "context" + "fmt" + "testing" + + apiv2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" + "github.com/kubeflow/pipelines/backend/src/apiserver/common" + "github.com/kubeflow/pipelines/backend/src/apiserver/model" + "github.com/kubeflow/pipelines/backend/src/apiserver/resource" + "github.com/stretchr/testify/assert" + "google.golang.org/grpc/metadata" +) + +// Helper function to create a run with tasks for testing +func initWithRunAndTasks(t *testing.T) (*resource.FakeClientManager, *resource.ResourceManager, *model.Run) { + initEnvVars() + clientManager := resource.NewFakeClientManagerOrFatalV2() + resourceManager := resource.NewResourceManager(clientManager, &resource.ResourceManagerOptions{CollectMetrics: false}) + + ctx := context.Background() + if common.IsMultiUserMode() { + md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: common.GoogleIAPUserIdentityPrefix + "user@google.com"}) + ctx = metadata.NewIncomingContext(context.Background(), md) + } + + // Create a run + run := &model.Run{ + DisplayName: "test-run", + Namespace: "ns1", + PipelineSpec: model.PipelineSpec{ + WorkflowSpecManifest: model.LargeText(testWorkflow.ToStringForStore()), + }, + } + createdRun, err := resourceManager.CreateRun(ctx, run) + assert.Nil(t, err) + + // Create some tasks for this run + for i := 1; i <= 3; i++ { + task := &model.Task{ + Namespace: "ns1", + RunUUID: createdRun.UUID, + Name: fmt.Sprintf("task-%d", i), + State: model.TaskStatus(apiv2beta1.PipelineTaskDetail_RUNNING), + } + _, err := clientManager.TaskStore().CreateTask(task) + assert.Nil(t, err) + } + + return clientManager, resourceManager, createdRun +} + +// TestGetRun_DefaultView tests that GetRun with DEFAULT view returns task_count but no tasks +func TestGetRun_DefaultView(t *testing.T) { + clients, manager, createdRun := initWithRunAndTasks(t) + defer clients.Close() + server := createRunServer(manager) + + ctx := context.Background() + if common.IsMultiUserMode() { + md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: common.GoogleIAPUserIdentityPrefix + "user@google.com"}) + ctx = metadata.NewIncomingContext(context.Background(), md) + } + + // Get run with DEFAULT view (or unspecified) + defaultView := apiv2beta1.GetRunRequest_DEFAULT + response, err := server.GetRun(ctx, &apiv2beta1.GetRunRequest{ + RunId: createdRun.UUID, + View: &defaultView, + }) + + assert.Nil(t, err) + assert.NotNil(t, response) + assert.Equal(t, createdRun.UUID, response.RunId) + + // Verify task_count is populated + assert.Equal(t, int32(3), response.TaskCount) + + // Verify tasks are NOT populated (or empty) + assert.Nil(t, response.Tasks) +} + +// TestGetRun_DefaultView_Unspecified tests that GetRun with no view parameter returns task_count but no tasks +func TestGetRun_DefaultView_Unspecified(t *testing.T) { + clients, manager, createdRun := initWithRunAndTasks(t) + defer clients.Close() + server := createRunServer(manager) + + ctx := context.Background() + if common.IsMultiUserMode() { + md := 
metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: common.GoogleIAPUserIdentityPrefix + "user@google.com"}) + ctx = metadata.NewIncomingContext(context.Background(), md) + } + + // Get run without specifying view (should default to DEFAULT behavior) + response, err := server.GetRun(ctx, &apiv2beta1.GetRunRequest{ + RunId: createdRun.UUID, + }) + + assert.Nil(t, err) + assert.NotNil(t, response) + assert.Equal(t, createdRun.UUID, response.RunId) + + // Verify task_count is populated + assert.Equal(t, int32(3), response.TaskCount) + + // Verify tasks are NOT populated + assert.Nil(t, response.Tasks) +} + +// TestGetRun_FullView tests that GetRun with FULL view returns both task_count and full tasks +func TestGetRun_FullView(t *testing.T) { + clients, manager, createdRun := initWithRunAndTasks(t) + defer clients.Close() + server := createRunServer(manager) + + ctx := context.Background() + if common.IsMultiUserMode() { + md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: common.GoogleIAPUserIdentityPrefix + "user@google.com"}) + ctx = metadata.NewIncomingContext(context.Background(), md) + } + + // Get run with FULL view + fullView := apiv2beta1.GetRunRequest_FULL + response, err := server.GetRun(ctx, &apiv2beta1.GetRunRequest{ + RunId: createdRun.UUID, + View: &fullView, + }) + + assert.Nil(t, err) + assert.NotNil(t, response) + assert.Equal(t, createdRun.UUID, response.RunId) + + // Verify task_count is populated + assert.Equal(t, int32(3), response.TaskCount) + + // Verify tasks ARE populated with full details + assert.NotNil(t, response.Tasks) + assert.Equal(t, 3, len(response.Tasks)) + + // Verify task details are populated + for _, task := range response.Tasks { + assert.NotEmpty(t, task.TaskId) + assert.NotEmpty(t, task.Name) + assert.Equal(t, createdRun.UUID, task.RunId) + assert.NotNil(t, task.Inputs) + assert.NotNil(t, task.Outputs) + } +} + +// TestListRuns_DefaultView tests that ListRuns with DEFAULT view returns task_count but no tasks +func TestListRuns_DefaultView(t *testing.T) { + clients, manager, createdRun := initWithRunAndTasks(t) + defer clients.Close() + server := createRunServer(manager) + + ctx := context.Background() + if common.IsMultiUserMode() { + md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: common.GoogleIAPUserIdentityPrefix + "user@google.com"}) + ctx = metadata.NewIncomingContext(context.Background(), md) + } + + // List runs with DEFAULT view + defaultView := apiv2beta1.ListRunsRequest_DEFAULT + response, err := server.ListRuns(ctx, &apiv2beta1.ListRunsRequest{ + Namespace: "ns1", + View: &defaultView, + }) + + assert.Nil(t, err) + assert.NotNil(t, response) + assert.True(t, len(response.Runs) > 0) + + // Find our created run + var foundRun *apiv2beta1.Run + for _, run := range response.Runs { + if run.RunId == createdRun.UUID { + foundRun = run + break + } + } + + assert.NotNil(t, foundRun) + + // Verify task_count is populated + assert.Equal(t, int32(3), foundRun.TaskCount) + + // Verify tasks are NOT populated + assert.Nil(t, foundRun.Tasks) +} + +// TestListRuns_DefaultView_Unspecified tests that ListRuns with no view parameter returns task_count but no tasks +func TestListRuns_DefaultView_Unspecified(t *testing.T) { + clients, manager, createdRun := initWithRunAndTasks(t) + defer clients.Close() + server := createRunServer(manager) + + ctx := context.Background() + if common.IsMultiUserMode() { + md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: 
common.GoogleIAPUserIdentityPrefix + "user@google.com"}) + ctx = metadata.NewIncomingContext(context.Background(), md) + } + + // List runs without specifying view (should default to DEFAULT behavior) + response, err := server.ListRuns(ctx, &apiv2beta1.ListRunsRequest{ + Namespace: "ns1", + }) + + assert.Nil(t, err) + assert.NotNil(t, response) + assert.True(t, len(response.Runs) > 0) + + // Find our created run + var foundRun *apiv2beta1.Run + for _, run := range response.Runs { + if run.RunId == createdRun.UUID { + foundRun = run + break + } + } + + assert.NotNil(t, foundRun) + + // Verify task_count is populated + assert.Equal(t, int32(3), foundRun.TaskCount) + + // Verify tasks are NOT populated + assert.Nil(t, foundRun.Tasks) +} + +// TestListRuns_FullView tests that ListRuns with FULL view returns both task_count and full tasks +func TestListRuns_FullView(t *testing.T) { + clients, manager, createdRun := initWithRunAndTasks(t) + defer clients.Close() + server := createRunServer(manager) + + ctx := context.Background() + if common.IsMultiUserMode() { + md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: common.GoogleIAPUserIdentityPrefix + "user@google.com"}) + ctx = metadata.NewIncomingContext(context.Background(), md) + } + + // List runs with FULL view + fullView := apiv2beta1.ListRunsRequest_FULL + response, err := server.ListRuns(ctx, &apiv2beta1.ListRunsRequest{ + Namespace: "ns1", + View: &fullView, + }) + + assert.Nil(t, err) + assert.NotNil(t, response) + assert.True(t, len(response.Runs) > 0) + + // Find our created run + var foundRun *apiv2beta1.Run + for _, run := range response.Runs { + if run.RunId == createdRun.UUID { + foundRun = run + break + } + } + + assert.NotNil(t, foundRun) + + // Verify task_count is populated + assert.Equal(t, int32(3), foundRun.TaskCount) + + // Verify tasks ARE populated with full details + assert.NotNil(t, foundRun.Tasks) + assert.Equal(t, 3, len(foundRun.Tasks)) + + // Verify task details are populated + for _, task := range foundRun.Tasks { + assert.NotEmpty(t, task.TaskId) + assert.NotEmpty(t, task.Name) + assert.Equal(t, createdRun.UUID, task.RunId) + assert.NotNil(t, task.Inputs) + assert.NotNil(t, task.Outputs) + } +} + +// TestGetRun_NoTasks tests that GetRun returns task_count of 0 when run has no tasks +func TestGetRun_NoTasks(t *testing.T) { + clients, manager, _ := initWithOneTimeRunV2(t) + defer clients.Close() + server := createRunServer(manager) + + ctx := context.Background() + if common.IsMultiUserMode() { + md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: common.GoogleIAPUserIdentityPrefix + "user@google.com"}) + ctx = metadata.NewIncomingContext(context.Background(), md) + } + + // Create a run without tasks + run := &model.Run{ + DisplayName: "test-run-no-tasks", + Namespace: "ns1", + PipelineSpec: model.PipelineSpec{ + WorkflowSpecManifest: model.LargeText(testWorkflow.ToStringForStore()), + }, + } + createdRun, err := manager.CreateRun(ctx, run) + assert.Nil(t, err) + + // Get run with DEFAULT view + defaultView := apiv2beta1.GetRunRequest_DEFAULT + response, err := server.GetRun(ctx, &apiv2beta1.GetRunRequest{ + RunId: createdRun.UUID, + View: &defaultView, + }) + + assert.Nil(t, err) + assert.NotNil(t, response) + + // Verify task_count is 0 + assert.Equal(t, int32(0), response.TaskCount) + + // Verify tasks are not populated + assert.Nil(t, response.Tasks) +} + +// TestTaskCount_AlwaysPopulated tests that task_count is always populated regardless of view +func 
TestTaskCount_AlwaysPopulated(t *testing.T) { + clients, manager, createdRun := initWithRunAndTasks(t) + defer clients.Close() + server := createRunServer(manager) + + ctx := context.Background() + if common.IsMultiUserMode() { + md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: common.GoogleIAPUserIdentityPrefix + "user@google.com"}) + ctx = metadata.NewIncomingContext(context.Background(), md) + } + + // Test with DEFAULT view + defaultView := apiv2beta1.GetRunRequest_DEFAULT + defaultResponse, err := server.GetRun(ctx, &apiv2beta1.GetRunRequest{ + RunId: createdRun.UUID, + View: &defaultView, + }) + assert.Nil(t, err) + assert.Equal(t, int32(3), defaultResponse.TaskCount) + + // Test with FULL view + fullView := apiv2beta1.GetRunRequest_FULL + fullResponse, err := server.GetRun(ctx, &apiv2beta1.GetRunRequest{ + RunId: createdRun.UUID, + View: &fullView, + }) + assert.Nil(t, err) + assert.Equal(t, int32(3), fullResponse.TaskCount) + + // Both should have the same task count + assert.Equal(t, defaultResponse.TaskCount, fullResponse.TaskCount) +} diff --git a/backend/src/apiserver/server/task_server.go b/backend/src/apiserver/server/task_server.go deleted file mode 100644 index f8a61478415..00000000000 --- a/backend/src/apiserver/server/task_server.go +++ /dev/null @@ -1,123 +0,0 @@ -// Copyright 2018 The Kubeflow Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package server - -import ( - "context" - "strings" - - apiv1beta1 "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client" - - api "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client" - "github.com/kubeflow/pipelines/backend/src/apiserver/model" - "github.com/kubeflow/pipelines/backend/src/apiserver/resource" - "github.com/kubeflow/pipelines/backend/src/common/util" -) - -type TaskServer struct { - resourceManager *resource.ResourceManager - apiv1beta1.UnimplementedTaskServiceServer -} - -// Creates a task. -// Supports v1beta1 behavior. 
-func (s *TaskServer) CreateTaskV1(ctx context.Context, request *api.CreateTaskRequest) (*api.Task, error) { - err := s.validateCreateTaskRequest(request) - if err != nil { - return nil, util.Wrap(err, "Failed to create a new task due to validation error") - } - - modelTask, err := toModelTask(request.GetTask()) - if err != nil { - return nil, util.Wrap(err, "Failed to create a new task due to conversion error") - } - - task, err := s.resourceManager.CreateTask(modelTask) - if err != nil { - return nil, util.Wrap(err, "Failed to create a new task") - } - - return toApiTaskV1(task), nil -} - -func (s *TaskServer) validateCreateTaskRequest(request *api.CreateTaskRequest) error { - if request == nil { - return util.NewInvalidInputError("CreatTaskRequest is nil") - } - task := request.GetTask() - - errMustSpecify := func(s string) error { return util.NewInvalidInputError("Invalid task: must specify %s", s) } - - if task.GetId() != "" { - return util.NewInvalidInputError("Invalid task: Id should not be set") - } - if task.GetPipelineName() == "" { - return errMustSpecify("PipelineName") - } - if strings.HasPrefix(task.GetPipelineName(), "namespace/") { - s := strings.SplitN(task.GetPipelineName(), "/", 4) - if len(s) != 4 { - return util.NewInvalidInputError("invalid PipelineName for namespaced pipelines, need to follow 'namespace/${namespace}/pipeline/${pipelineName}': %s", task.GetPipelineName()) - } - namespace := s[1] - if task.GetNamespace() != "" && namespace != task.GetNamespace() { - return util.NewInvalidInputError("the namespace %s extracted from pipelineName is not equal to the namespace %s in task", namespace, task.GetNamespace()) - } - } - if task.GetRunId() == "" { - return errMustSpecify("RunID") - } - if task.GetMlmdExecutionID() == "" { - return errMustSpecify("MlmdExecutionID") - } - if task.GetFingerprint() == "" { - return errMustSpecify("FingerPrint") - } - if task.GetCreatedAt() == nil { - return errMustSpecify("CreatedAt") - } - return nil -} - -// Fetches tasks given query parameters. -// Supports v1beta1 behavior. -func (s *TaskServer) ListTasksV1(ctx context.Context, request *api.ListTasksRequest) ( - *api.ListTasksResponse, error, -) { - opts, err := validatedListOptions(&model.Task{}, request.PageToken, int(request.PageSize), request.SortBy, request.Filter, "v1beta1") - if err != nil { - return nil, util.Wrap(err, "Failed to create list options") - } - - filterContext, err := validateFilterV1(request.ResourceReferenceKey) - if err != nil { - return nil, util.Wrap(err, "Validating filter failed") - } - - tasks, total_size, nextPageToken, err := s.resourceManager.ListTasks(filterContext, opts) - if err != nil { - return nil, util.Wrap(err, "List tasks failed") - } - return &api.ListTasksResponse{ - Tasks: toApiTasksV1(tasks), - TotalSize: int32(total_size), - NextPageToken: nextPageToken, - }, - nil -} - -func NewTaskServer(resourceManager *resource.ResourceManager) *TaskServer { - return &TaskServer{resourceManager: resourceManager} -} diff --git a/backend/src/apiserver/storage/artifact_store.go b/backend/src/apiserver/storage/artifact_store.go new file mode 100644 index 00000000000..9a1f1cf0cd2 --- /dev/null +++ b/backend/src/apiserver/storage/artifact_store.go @@ -0,0 +1,293 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package storage provides the storage layer for the API server. +package storage + +import ( + "database/sql" + "fmt" + + sq "github.com/Masterminds/squirrel" + "github.com/golang/glog" + "github.com/kubeflow/pipelines/backend/src/apiserver/list" + "github.com/kubeflow/pipelines/backend/src/apiserver/model" + "github.com/kubeflow/pipelines/backend/src/common/util" +) + +const artifactTableName = "artifacts" + +var artifactColumns = []string{ + "UUID", + "Namespace", + "Type", + "URI", + "Name", + "Description", + "CreatedAtInSec", + "LastUpdateInSec", + "Metadata", + "NumberValue", +} + +// Ensure that ArtifactStore implements the ArtifactStoreInterface interface. +var _ ArtifactStoreInterface = &ArtifactStore{} + +type ArtifactStoreInterface interface { + // Create an artifact entry in the database. + CreateArtifact(artifact *model.Artifact) (*model.Artifact, error) + + // Fetches an artifact with a given id. + GetArtifact(id string) (*model.Artifact, error) + + // Fetches artifacts for given filtering and listing options. + ListArtifacts(filterContext *model.FilterContext, opts *list.Options) ([]*model.Artifact, int, string, error) +} + +type ArtifactStore struct { + db *DB + time util.TimeInterface + uuid util.UUIDGeneratorInterface +} + +// NewArtifactStore creates a new ArtifactStore. +func NewArtifactStore(db *DB, time util.TimeInterface, uuid util.UUIDGeneratorInterface) *ArtifactStore { + return &ArtifactStore{ + db: db, + time: time, + uuid: uuid, + } +} + +// CreateArtifact persists the artifact under a freshly generated UUID with creation timestamps set. +func (s *ArtifactStore) CreateArtifact(artifact *model.Artifact) (*model.Artifact, error) { + // Set up UUID for artifact. + newArtifact := *artifact + id, err := s.uuid.NewRandom() + if err != nil { + return nil, util.NewInternalServerError(err, "Failed to create an artifact id") + } + newArtifact.UUID = id.String() + + // Set creation timestamps + now := s.time.Now().Unix() + newArtifact.CreatedAtInSec = now + newArtifact.LastUpdateInSec = now + + // Convert metadata to JSON string for storage + metadataJSON, err := newArtifact.Metadata.Value() + if err != nil { + return nil, util.NewInternalServerError(err, "Failed to marshal artifact metadata") + } + + sql, args, err := sq. + Insert(artifactTableName). + SetMap( + sq.Eq{ + "UUID": newArtifact.UUID, + "Namespace": newArtifact.Namespace, + "Type": newArtifact.Type, + "URI": newArtifact.URI, + "Name": newArtifact.Name, + "Description": newArtifact.Description, + "CreatedAtInSec": newArtifact.CreatedAtInSec, + "LastUpdateInSec": newArtifact.LastUpdateInSec, + "Metadata": metadataJSON, + "NumberValue": newArtifact.NumberValue, + }, + ). + ToSql() + if err != nil { + return nil, util.NewInternalServerError(err, "Failed to create query to insert artifact to artifact table: %v", + err.Error()) + } + + _, err = s.db.Exec(sql, args...) 
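+ // Any driver error from the insert surfaces as an internal server error; + // on success the in-memory copy is returned rather than re-reading the row.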
+ if err != nil { + return nil, util.NewInternalServerError(err, "Failed to add artifact to artifact table: %v", + err.Error()) + } + + return &newArtifact, nil +} + +func (s *ArtifactStore) scanRows(rows *sql.Rows) ([]*model.Artifact, error) { + var artifacts []*model.Artifact + for rows.Next() { + var uuid, namespace string + var name, uri, description sql.NullString + var artifactType int32 + var createdAtInSec, lastUpdateInSec int64 + var metadataBytes []byte + var numberValue sql.NullFloat64 + + err := rows.Scan( + &uuid, + &namespace, + &artifactType, + &uri, + &name, + &description, + &createdAtInSec, + &lastUpdateInSec, + &metadataBytes, + &numberValue, + ) + if err != nil { + return artifacts, err + } + + // Parse metadata JSON + var metadata model.JSONData + if metadataBytes != nil { + err = metadata.Scan(metadataBytes) + if err != nil { + return artifacts, util.NewInternalServerError(err, "Failed to parse artifact metadata") + } + } + + artifact := &model.Artifact{ + UUID: uuid, + Namespace: namespace, + Type: model.ArtifactType(artifactType), + Name: name.String, + Description: description.String, + CreatedAtInSec: createdAtInSec, + LastUpdateInSec: lastUpdateInSec, + Metadata: metadata, + } + if numberValue.Valid { + artifact.NumberValue = &numberValue.Float64 + } + + if uri.Valid { + artifact.URI = &uri.String + } + artifacts = append(artifacts, artifact) + } + return artifacts, nil +} + +func (s *ArtifactStore) ListArtifacts(filterContext *model.FilterContext, opts *list.Options) ([]*model.Artifact, int, string, error) { + errorF := func(err error) ([]*model.Artifact, int, string, error) { + return nil, 0, "", util.NewInternalServerError(err, "Failed to list artifacts: %v", err) + } + + // SQL for getting the filtered and paginated rows + sqlBuilder := sq.Select(artifactColumns...).From(artifactTableName) + + // Apply namespace filtering if provided + if filterContext != nil && filterContext.ReferenceKey != nil { + if filterContext.Type == model.NamespaceResourceType { + sqlBuilder = sqlBuilder.Where(sq.Eq{"Namespace": filterContext.ID}) + } + } + + sqlBuilder = opts.AddFilterToSelect(sqlBuilder) + + rowsSQL, rowsArgs, err := opts.AddPaginationToSelect(sqlBuilder).ToSql() + if err != nil { + return errorF(err) + } + + // SQL for getting total size + countBuilder := sq.Select("count(*)").From(artifactTableName) + if filterContext != nil && filterContext.ReferenceKey != nil { + if filterContext.Type == model.NamespaceResourceType { + countBuilder = countBuilder.Where(sq.Eq{"Namespace": filterContext.ID}) + } + } + sizeSQL, sizeArgs, err := opts.AddFilterToSelect(countBuilder).ToSql() + if err != nil { + return errorF(err) + } + + // Use a transaction to make sure we're returning the totalSize of the same rows queried + tx, err := s.db.Begin() + if err != nil { + glog.Errorf("Failed to start transaction to list artifacts") + return errorF(err) + } + + rows, err := tx.Query(rowsSQL, rowsArgs...) + if err != nil { + tx.Rollback() + return errorF(err) + } + if err := rows.Err(); err != nil { + tx.Rollback() + return errorF(err) + } + artifacts, err := s.scanRows(rows) + if err != nil { + tx.Rollback() + return errorF(err) + } + defer rows.Close() + + sizeRow, err := tx.Query(sizeSQL, sizeArgs...) 
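+ // The count query runs on the same transaction as the page query above, + // so totalSize reflects the exact snapshot those rows came from.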
+ if err != nil { + tx.Rollback() + return errorF(err) + } + if err := sizeRow.Err(); err != nil { + tx.Rollback() + return errorF(err) + } + totalSize, err := list.ScanRowToTotalSize(sizeRow) + if err != nil { + tx.Rollback() + return errorF(err) + } + defer sizeRow.Close() + + err = tx.Commit() + if err != nil { + glog.Errorf("Failed to commit transaction to list artifacts") + return errorF(err) + } + + if len(artifacts) <= opts.PageSize { + return artifacts, totalSize, "", nil + } + + npt, err := opts.NextPageToken(artifacts[opts.PageSize]) + return artifacts[:opts.PageSize], totalSize, npt, err +} + +func (s *ArtifactStore) GetArtifact(id string) (*model.Artifact, error) { + sql, args, err := sq. + Select(artifactColumns...). + From(artifactTableName). + Where(sq.Eq{"UUID": id}). + Limit(1).ToSql() + if err != nil { + return nil, util.NewInternalServerError(err, "Failed to create query to get artifact: %v", err.Error()) + } + + r, err := s.db.Query(sql, args...) + if err != nil { + return nil, util.NewInternalServerError(err, "Failed to get artifact: %v", err.Error()) + } + defer r.Close() + + artifacts, err := s.scanRows(r) + if err != nil || len(artifacts) > 1 { + return nil, util.NewInternalServerError(err, "Failed to get artifact: %v", err.Error()) + } + if len(artifacts) == 0 { + return nil, util.NewResourceNotFoundError("artifact", fmt.Sprint(id)) + } + + return artifacts[0], nil +} diff --git a/backend/src/apiserver/storage/artifact_store_test.go b/backend/src/apiserver/storage/artifact_store_test.go new file mode 100644 index 00000000000..41d33bf508b --- /dev/null +++ b/backend/src/apiserver/storage/artifact_store_test.go @@ -0,0 +1,172 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package storage + +import ( + "testing" + + apiv2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" + "github.com/kubeflow/pipelines/backend/src/apiserver/filter" + "github.com/kubeflow/pipelines/backend/src/apiserver/list" + "github.com/kubeflow/pipelines/backend/src/apiserver/model" + "github.com/kubeflow/pipelines/backend/src/common/util" + "github.com/stretchr/testify/assert" + "google.golang.org/grpc/codes" +) + +func strPTR(s string) *string { return &s } + +const ( + artifactUUID1 = "a23e4567-e89b-12d3-a456-426655441011" + artifactUUID2 = "a23e4567-e89b-12d3-a456-426655441012" + artifactUUID3 = "a23e4567-e89b-12d3-a456-426655441013" +) + +// initializeArtifactStore sets up a fake DB and returns an ArtifactStore ready for testing. 
+func initializeArtifactStore() (*DB, *ArtifactStore) { + db := NewFakeDBOrFatal() + fakeTime := util.NewFakeTimeForEpoch() + store := NewArtifactStore(db, fakeTime, util.NewFakeUUIDGeneratorOrFatal(artifactUUID1, nil)) + return db, store +} + +func TestArtifactAPIFieldMap(t *testing.T) { + for _, modelField := range (&model.Artifact{}).APIToModelFieldMap() { + assert.Contains(t, artifactColumns, modelField) + } +} + +func TestCreateArtifact_Success(t *testing.T) { + db, store := initializeArtifactStore() + defer db.Close() + + art := &model.Artifact{ + Namespace: "ns1", + Type: model.ArtifactType(apiv2beta1.Artifact_Artifact), + URI: strPTR("s3://bucket/path/file"), + Name: "model.pt", + Metadata: model.JSONData(map[string]interface{}{"k": "v"}), + } + + created, err := store.CreateArtifact(art) + assert.NoError(t, err) + assert.Equal(t, artifactUUID1, created.UUID) + assert.Greater(t, created.CreatedAtInSec, int64(0)) + assert.Equal(t, created.CreatedAtInSec, created.LastUpdateInSec) + assert.Equal(t, "ns1", created.Namespace) + assert.Equal(t, model.ArtifactType(apiv2beta1.Artifact_Artifact), created.Type) + assert.Equal(t, "s3://bucket/path/file", *created.URI) + assert.Equal(t, "model.pt", created.Name) + assert.Equal(t, "v", created.Metadata["k"]) + + // fetch back + fetched, err := store.GetArtifact(created.UUID) + assert.NoError(t, err) + assert.Equal(t, created.UUID, fetched.UUID) + assert.Equal(t, created.CreatedAtInSec, fetched.CreatedAtInSec) + assert.Equal(t, created.LastUpdateInSec, fetched.LastUpdateInSec) + assert.Equal(t, created.Namespace, fetched.Namespace) + assert.Equal(t, created.Type, fetched.Type) + assert.Equal(t, created.URI, fetched.URI) + assert.Equal(t, created.Name, fetched.Name) + assert.Equal(t, created.Metadata, fetched.Metadata) +} + +func TestGetArtifact_NotFound(t *testing.T) { + db, store := initializeArtifactStore() + defer db.Close() + _, err := store.GetArtifact(artifactUUID1) + assert.Equal(t, codes.NotFound, err.(*util.UserError).ExternalStatusCode()) +} + +func TestListArtifacts_BasicFiltersAndPagination(t *testing.T) { + db, store := initializeArtifactStore() + defer db.Close() + + // Seed 3 artifacts across 2 namespaces and types + store.uuid = util.NewFakeUUIDGeneratorOrFatal(artifactUUID1, nil) + _, err := store.CreateArtifact(&model.Artifact{ + Namespace: "ns1", + Type: 1, + URI: strPTR("u1"), + Name: "a1", + Metadata: map[string]interface{}{"m": 1}, + }) + assert.NoError(t, err) + + store.uuid = util.NewFakeUUIDGeneratorOrFatal(artifactUUID2, nil) + _, err = store.CreateArtifact(&model.Artifact{ + Namespace: "ns1", + Type: 2, + URI: strPTR("u2"), + Name: "a2", + Metadata: map[string]interface{}{"m": 2}, + }) + assert.NoError(t, err) + + store.uuid = util.NewFakeUUIDGeneratorOrFatal(artifactUUID3, nil) + _, err = store.CreateArtifact(&model.Artifact{ + Namespace: "ns2", + Type: 1, + URI: strPTR("u3"), + Name: "a3", + Metadata: map[string]interface{}{"m": 3}, + }) + assert.NoError(t, err) + + // List all + opts, _ := list.NewOptions(&model.Artifact{}, 10, "", nil) + all, total, npt, err := store.ListArtifacts(&model.FilterContext{}, opts) + assert.NoError(t, err) + assert.Equal(t, 3, len(all)) + assert.Equal(t, 3, total) + assert.Equal(t, "", npt) + + // Filter by Namespace + opts2, _ := list.NewOptions(&model.Artifact{}, 10, "", nil) + nsFiltered, total2, _, err := store.ListArtifacts(&model.FilterContext{ReferenceKey: &model.ReferenceKey{Type: model.NamespaceResourceType, ID: "ns1"}}, opts2) + assert.NoError(t, err) + assert.Equal(t, 2, 
len(nsFiltered)) + assert.Equal(t, 2, total2) + + // Filter predicate on Type equals 1 + fProto := &apiv2beta1.Filter{Predicates: []*apiv2beta1.Predicate{ + {Key: "type", Operation: apiv2beta1.Predicate_EQUALS, Value: &apiv2beta1.Predicate_IntValue{IntValue: 1}}, + }} + f, err := filter.New(fProto) + assert.NoError(t, err) + opts3, err := list.NewOptions(&model.Artifact{}, 10, "", f) + assert.NoError(t, err) + filtered, total3, _, err := store.ListArtifacts(&model.FilterContext{}, opts3) + assert.NoError(t, err) + assert.Equal(t, 2, len(filtered)) + assert.Equal(t, 2, total3) + + // Pagination page size 2 with token + opts4, _ := list.NewOptions(&model.Artifact{}, 2, "", nil) + page1, total4, token, err := store.ListArtifacts(&model.FilterContext{}, opts4) + assert.NoError(t, err) + assert.Equal(t, 2, len(page1)) + assert.Equal(t, 3, total4) + assert.NotEqual(t, "", token) + + opts5, err := list.NewOptionsFromToken(token, 2) + assert.NoError(t, err) + page2, total5, token2, err := store.ListArtifacts(&model.FilterContext{}, opts5) + assert.NoError(t, err) + assert.Equal(t, 1, len(page2)) + assert.Equal(t, 3, total5) + assert.Equal(t, "", token2) +} diff --git a/backend/src/apiserver/storage/artifact_task_store.go b/backend/src/apiserver/storage/artifact_task_store.go new file mode 100644 index 00000000000..cd1bc1decf9 --- /dev/null +++ b/backend/src/apiserver/storage/artifact_task_store.go @@ -0,0 +1,374 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package storage contains the implementation of the storage interface. +package storage + +import ( + "database/sql" + "fmt" + + sq "github.com/Masterminds/squirrel" + "github.com/golang/glog" + "github.com/kubeflow/pipelines/backend/src/apiserver/list" + "github.com/kubeflow/pipelines/backend/src/apiserver/model" + "github.com/kubeflow/pipelines/backend/src/common/util" +) + +const artifactTaskTableName = "artifact_tasks" + +var artifactTaskColumns = []string{ + "artifact_tasks.UUID", + "artifact_tasks.ArtifactID", + "artifact_tasks.TaskID", + "artifact_tasks.Type", + "artifact_tasks.RunUUID", + "artifact_tasks.Producer", + "artifact_tasks.ArtifactKey", +} + +type ArtifactTaskStoreInterface interface { + // CreateArtifactTask creates an artifact-task relationship entry in the database. + CreateArtifactTask(artifactTask *model.ArtifactTask) (*model.ArtifactTask, error) + + // CreateArtifactTasks creates multiple artifact-task relationships in a single transaction. + CreateArtifactTasks(artifactTasks []*model.ArtifactTask) ([]*model.ArtifactTask, error) + + // GetArtifactTask fetches an artifact-task relationship with a given id. + GetArtifactTask(id string) (*model.ArtifactTask, error) + + // ListArtifactTasks fetches artifact-task relationships for given filtering and listing options. 
+ // filterContexts supports multiple filters: ArtifactID, TaskID, RunUUID + // ioType optionally filters by IOType (pass nil to skip filtering by type) + ListArtifactTasks(filterContexts []*model.FilterContext, ioType *model.IOType, opts *list.Options) ([]*model.ArtifactTask, int, string, error) +} + +type ArtifactTaskStore struct { + db *DB + uuid util.UUIDGeneratorInterface +} + +// NewArtifactTaskStore creates a new ArtifactTaskStore. +func NewArtifactTaskStore(db *DB, uuid util.UUIDGeneratorInterface) *ArtifactTaskStore { + return &ArtifactTaskStore{ + db: db, + uuid: uuid, + } +} + +func (s *ArtifactTaskStore) CreateArtifactTask(artifactTask *model.ArtifactTask) (*model.ArtifactTask, error) { + // Set up UUID for artifact-task relationship. + newArtifactTask := *artifactTask + id, err := s.uuid.NewRandom() + if err != nil { + return nil, util.NewInternalServerError(err, "Failed to create an artifact-task id") + } + newArtifactTask.UUID = id.String() + + // Serialize Producer to JSON if present + producerValue, err := newArtifactTask.Producer.Value() + if err != nil { + return nil, util.NewInternalServerError(err, "Failed to marshal producer JSON: %v", err.Error()) + } + + sql, args, err := sq. + Insert(artifactTaskTableName). + SetMap( + sq.Eq{ + "UUID": newArtifactTask.UUID, + "ArtifactID": newArtifactTask.ArtifactID, + "TaskID": newArtifactTask.TaskID, + "Type": newArtifactTask.Type, + "RunUUID": newArtifactTask.RunUUID, + "Producer": producerValue, + "ArtifactKey": newArtifactTask.ArtifactKey, + }, + ). + ToSql() + if err != nil { + return nil, util.NewInternalServerError(err, "Failed to create query to insert artifact-task to artifact_tasks table: %v", + err.Error()) + } + + _, err = s.db.Exec(sql, args...) + if err != nil { + return nil, util.NewInternalServerError(err, "Failed to add artifact-task to artifact_tasks table: %v", + err.Error()) + } + + return &newArtifactTask, nil +} + +func (s *ArtifactTaskStore) CreateArtifactTasks(artifactTasks []*model.ArtifactTask) ([]*model.ArtifactTask, error) { + if len(artifactTasks) == 0 { + return []*model.ArtifactTask{}, nil + } + + tx, err := s.db.Begin() + if err != nil { + return nil, util.NewInternalServerError(err, "Failed to start transaction for creating artifact-tasks") + } + defer tx.Rollback() + + var newArtifactTasks []*model.ArtifactTask + for _, artifactTask := range artifactTasks { + newArtifactTask := *artifactTask + id, err := s.uuid.NewRandom() + if err != nil { + return nil, util.NewInternalServerError(err, "Failed to create an artifact-task id") + } + newArtifactTask.UUID = id.String() + + // Serialize Producer to JSON if present + producerValue, err := newArtifactTask.Producer.Value() + if err != nil { + return nil, util.NewInternalServerError(err, "Failed to marshal producer JSON: %v", err.Error()) + } + + toSQL, args, err := sq. + Insert(artifactTaskTableName). + SetMap( + sq.Eq{ + "UUID": newArtifactTask.UUID, + "ArtifactID": newArtifactTask.ArtifactID, + "TaskID": newArtifactTask.TaskID, + "Type": newArtifactTask.Type, + "RunUUID": newArtifactTask.RunUUID, + "Producer": producerValue, + "ArtifactKey": newArtifactTask.ArtifactKey, + }, + ). + ToSql() + if err != nil { + return nil, util.NewInternalServerError(err, "Failed to create query to insert artifact-task: %v", err.Error()) + } + + _, err = tx.Exec(toSQL, args...) 
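+ // A failure on any row aborts the whole batch; the deferred Rollback + // discards earlier inserts, so callers never observe a partial link set.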
+ if err != nil { + return nil, util.NewInternalServerError(err, "Failed to add artifact-task: %v", err.Error()) + } + + newArtifactTasks = append(newArtifactTasks, &newArtifactTask) + } + + err = tx.Commit() + if err != nil { + return nil, util.NewInternalServerError(err, "Failed to commit transaction for creating artifact-tasks") + } + + return newArtifactTasks, nil +} + +func (s *ArtifactTaskStore) scanRows(rows *sql.Rows) ([]*model.ArtifactTask, error) { + var artifactTasks []*model.ArtifactTask + for rows.Next() { + var uuid, artifactID, taskID string + var runUUID, key string + var ioType int32 + var producer model.JSONData + + err := rows.Scan( + &uuid, + &artifactID, + &taskID, + &ioType, + &runUUID, + &producer, + &key, + ) + if err != nil { + return artifactTasks, err + } + + artifactTask := &model.ArtifactTask{ + UUID: uuid, + ArtifactID: artifactID, + TaskID: taskID, + Type: model.IOType(ioType), + RunUUID: runUUID, + Producer: producer, + ArtifactKey: key, + } + artifactTasks = append(artifactTasks, artifactTask) + } + return artifactTasks, nil +} + +// applyFilterContextsToQuery applies multiple filter contexts to the query builder +// Supports filtering by multiple artifact_ids, task_ids, and run_ids simultaneously +func (s *ArtifactTaskStore) applyFilterContextsToQuery(sqlBuilder sq.SelectBuilder, filterContexts []*model.FilterContext) sq.SelectBuilder { + var artifactIDs []string + var taskIDs []string + var runIDs []string + + // Collect all filter values by type + for _, filterContext := range filterContexts { + if filterContext == nil || filterContext.ReferenceKey == nil { + continue + } + + switch filterContext.Type { + case model.ArtifactResourceType: + artifactIDs = append(artifactIDs, filterContext.ID) + case model.TaskResourceType: + taskIDs = append(taskIDs, filterContext.ID) + case model.RunResourceType: + runIDs = append(runIDs, filterContext.ID) + } + } + + // Apply artifact ID filters (OR within artifact IDs) + if len(artifactIDs) > 0 { + if len(artifactIDs) == 1 { + sqlBuilder = sqlBuilder.Where(sq.Eq{"artifact_tasks.ArtifactID": artifactIDs[0]}) + } else { + sqlBuilder = sqlBuilder.Where(sq.Eq{"artifact_tasks.ArtifactID": artifactIDs}) + } + } + + // Apply task ID filters (OR within task IDs) + if len(taskIDs) > 0 { + if len(taskIDs) == 1 { + sqlBuilder = sqlBuilder.Where(sq.Eq{"artifact_tasks.TaskID": taskIDs[0]}) + } else { + sqlBuilder = sqlBuilder.Where(sq.Eq{"artifact_tasks.TaskID": taskIDs}) + } + } + + // Apply run ID filters (OR within run IDs) now directly on artifact_tasks + if len(runIDs) > 0 { + if len(runIDs) == 1 { + sqlBuilder = sqlBuilder.Where(sq.Eq{"artifact_tasks.RunUUID": runIDs[0]}) + } else { + sqlBuilder = sqlBuilder.Where(sq.Eq{"artifact_tasks.RunUUID": runIDs}) + } + } + + return sqlBuilder +} + +func (s *ArtifactTaskStore) ListArtifactTasks(filterContexts []*model.FilterContext, ioType *model.IOType, opts *list.Options) ([]*model.ArtifactTask, int, string, error) { + errorF := func(err error) ([]*model.ArtifactTask, int, string, error) { + return nil, 0, "", util.NewInternalServerError(err, "Failed to list artifact-tasks: %v", err) + } + + // SQL for getting the filtered and paginated rows + sqlBuilder := sq.Select(artifactTaskColumns...).From(artifactTaskTableName) + sqlBuilder = s.applyFilterContextsToQuery(sqlBuilder, filterContexts) + + // Apply IOType filter if provided + if ioType != nil { + sqlBuilder = sqlBuilder.Where(sq.Eq{"artifact_tasks.Type": *ioType}) + } + + sqlBuilder = opts.AddFilterToSelect(sqlBuilder) + + rowsSQL, 
rowsArgs, err := opts.AddPaginationToSelect(sqlBuilder).ToSql() + if err != nil { + return errorF(err) + } + + // SQL for getting total size + countBuilder := sq.Select("count(*)").From(artifactTaskTableName) + countBuilder = s.applyFilterContextsToQuery(countBuilder, filterContexts) + + // Apply IOType filter if provided + if ioType != nil { + countBuilder = countBuilder.Where(sq.Eq{"artifact_tasks.Type": *ioType}) + } + + sizeSQL, sizeArgs, err := opts.AddFilterToSelect(countBuilder).ToSql() + if err != nil { + return errorF(err) + } + + // Use a transaction to make sure we're returning the totalSize of the same rows queried + tx, err := s.db.Begin() + if err != nil { + glog.Errorf("Failed to start transaction to list artifact-tasks") + return errorF(err) + } + + rows, err := tx.Query(rowsSQL, rowsArgs...) + if err != nil { + tx.Rollback() + return errorF(err) + } + if err := rows.Err(); err != nil { + tx.Rollback() + return errorF(err) + } + artifactTasks, err := s.scanRows(rows) + if err != nil { + tx.Rollback() + return errorF(err) + } + defer rows.Close() + + sizeRow, err := tx.Query(sizeSQL, sizeArgs...) + if err != nil { + tx.Rollback() + return errorF(err) + } + if err := sizeRow.Err(); err != nil { + tx.Rollback() + return errorF(err) + } + totalSize, err := list.ScanRowToTotalSize(sizeRow) + if err != nil { + tx.Rollback() + return errorF(err) + } + defer sizeRow.Close() + + err = tx.Commit() + if err != nil { + glog.Errorf("Failed to commit transaction to list artifact-tasks") + return errorF(err) + } + + if len(artifactTasks) <= opts.PageSize { + return artifactTasks, totalSize, "", nil + } + + npt, err := opts.NextPageToken(artifactTasks[opts.PageSize]) + return artifactTasks[:opts.PageSize], totalSize, npt, err +} + +func (s *ArtifactTaskStore) GetArtifactTask(id string) (*model.ArtifactTask, error) { + sql, args, err := sq. + Select(artifactTaskColumns...). + From(artifactTaskTableName). + Where(sq.Eq{"UUID": id}). + Limit(1).ToSql() + if err != nil { + return nil, util.NewInternalServerError(err, "Failed to create query to get artifact-task: %v", err.Error()) + } + + r, err := s.db.Query(sql, args...) + if err != nil { + return nil, util.NewInternalServerError(err, "Failed to get artifact-task: %v", err.Error()) + } + defer r.Close() + + artifactTasks, err := s.scanRows(r) + if err != nil || len(artifactTasks) > 1 { + return nil, util.NewInternalServerError(err, "Failed to get artifact-task: %v", err.Error()) + } + if len(artifactTasks) == 0 { + return nil, util.NewResourceNotFoundError("artifact-task", fmt.Sprint(id)) + } + + return artifactTasks[0], nil +} diff --git a/backend/src/apiserver/storage/artifact_task_store_test.go b/backend/src/apiserver/storage/artifact_task_store_test.go new file mode 100644 index 00000000000..692ca47f345 --- /dev/null +++ b/backend/src/apiserver/storage/artifact_task_store_test.go @@ -0,0 +1,504 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package storage + +import ( + "fmt" + "testing" + + apiv2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" + "github.com/kubeflow/pipelines/backend/src/apiserver/list" + "github.com/kubeflow/pipelines/backend/src/apiserver/model" + "github.com/kubeflow/pipelines/backend/src/common/util" + "github.com/stretchr/testify/assert" +) + +// Arbitrary but mutually distinct UUIDs used to seed the fake stores below. +const ( + linkUUID1 = "123e4567-e89b-12d3-a456-426655441011" + linkUUID2 = "123e4567-e89b-12d3-a456-426655441012" + linkUUID3 = "123e4567-e89b-12d3-a456-426655441019" + artifactID1 = "123e4567-e89b-12d3-a456-426655441013" + artifactID2 = "123e4567-e89b-12d3-a456-426655441014" + taskID1 = "123e4567-e89b-12d3-a456-426655441015" + taskID2 = "123e4567-e89b-12d3-a456-426655441016" + runID1 = "123e4567-e89b-12d3-a456-426655441017" + runID2 = "123e4567-e89b-12d3-a456-426655441018" +) + +// initializeArtifactTaskDeps sets up a fake DB and returns stores needed for artifact-task tests. +func initializeArtifactTaskDeps() (*DB, *ArtifactStore, *TaskStore, *RunStore, *ArtifactTaskStore) { + db := NewFakeDBOrFatal() + fakeTime := util.NewFakeTimeForEpoch() + + artifactStore := NewArtifactStore(db, fakeTime, util.NewFakeUUIDGeneratorOrFatal(artifactID1, nil)) + taskStore := NewTaskStore(db, fakeTime, util.NewFakeUUIDGeneratorOrFatal(taskID1, nil)) + runStore := NewRunStore(db, fakeTime) + linkStore := NewArtifactTaskStore(db, util.NewFakeUUIDGeneratorOrFatal(linkUUID1, nil)) + + // Seed runs to satisfy Task FK + _, _ = runStore.CreateRun(&model.Run{UUID: runID1, ExperimentId: "exp-1", K8SName: "r1", DisplayName: "r1", StorageState: model.StorageStateAvailable, Namespace: "ns1", RunDetails: model.RunDetails{CreatedAtInSec: 1, ScheduledAtInSec: 1, State: model.RuntimeStateRunning}}) + _, _ = runStore.CreateRun(&model.Run{UUID: runID2, ExperimentId: "exp-2", K8SName: "r2", DisplayName: "r2", StorageState: model.StorageStateAvailable, Namespace: "ns2", RunDetails: model.RunDetails{CreatedAtInSec: 2, ScheduledAtInSec: 2, State: model.RuntimeStateSucceeded}}) + + return db, artifactStore, taskStore, runStore, linkStore +} + +func TestArtifactTaskAPIFieldMap(t *testing.T) { + for _, modelField := range (&model.ArtifactTask{}).APIToModelFieldMap() { + assert.Contains(t, artifactTaskColumns, fmt.Sprintf("%s.%s", artifactTaskTableName, modelField)) + } +} + +func TestCreateArtifactTask_Success(t *testing.T) { + db, artifactStore, taskStore, _, linkStore := initializeArtifactTaskDeps() + defer db.Close() + + // Create an artifact and a task to link + artifactStore.uuid = util.NewFakeUUIDGeneratorOrFatal(artifactID1, nil) + art, err := artifactStore.CreateArtifact(&model.Artifact{ + Namespace: "ns1", + Type: 1, + URI: strPTR("s3://b/p1"), + Name: "a1", + Metadata: map[string]interface{}{"k": "v"}, + }) + assert.NoError(t, err) + + taskStore.uuid = util.NewFakeUUIDGeneratorOrFatal(taskID1, nil) + task, err := taskStore.CreateTask(&model.Task{ + Namespace: "ns1", + RunUUID: runID1, + Name: "t1", + Pods: createTaskPodsAsJSONSlice(createTaskPod("p1", "uid1", apiv2beta1.PipelineTaskDetail_EXECUTOR)), + Fingerprint: "fp1", + State: 1, + StateHistory: model.JSONSlice{}, + InputParameters: model.JSONSlice{}, + OutputParameters: model.JSONSlice{}, + Type: 0, + TypeAttrs: map[string]interface{}{}, + }) + assert.NoError(t, err) + + // Link as INPUT + linkStore.uuid = util.NewFakeUUIDGeneratorOrFatal(linkUUID1, nil) + link, err := linkStore.CreateArtifactTask(&model.ArtifactTask{ + ArtifactID: art.UUID, + TaskID: task.UUID, + RunUUID: runID1, + Type: 
model.IOType(apiv2beta1.IOType_COMPONENT_INPUT), + ArtifactKey: "input-key", + }) + assert.NoError(t, err) + assert.Equal(t, linkUUID1, link.UUID) + assert.Equal(t, art.UUID, link.ArtifactID) + assert.Equal(t, task.UUID, link.TaskID) + assert.Equal(t, model.IOType(apiv2beta1.IOType_COMPONENT_INPUT), link.Type) + + // Fetch back + got, err := linkStore.GetArtifactTask(link.UUID) + assert.NoError(t, err) + assert.Equal(t, link.UUID, got.UUID) + assert.Equal(t, link.ArtifactID, got.ArtifactID) + assert.Equal(t, link.TaskID, got.TaskID) + assert.Equal(t, link.Type, got.Type) +} + +func TestListArtifactTasks_Filters(t *testing.T) { + db, artifactStore, taskStore, _, linkStore := initializeArtifactTaskDeps() + defer db.Close() + + // Create 2 artifacts + artifactStore.uuid = util.NewFakeUUIDGeneratorOrFatal(artifactID1, nil) + art1, err := artifactStore.CreateArtifact(&model.Artifact{ + Namespace: "ns1", + Type: 1, + URI: strPTR("u1"), + Name: "a1", + Metadata: map[string]interface{}{}, + }) + assert.NoError(t, err) + artifactStore.uuid = util.NewFakeUUIDGeneratorOrFatal(artifactID2, nil) + art2, err := artifactStore.CreateArtifact(&model.Artifact{ + Namespace: "ns1", + Type: 1, + URI: strPTR("u2"), + Name: "a2", + Metadata: map[string]interface{}{}, + }) + assert.NoError(t, err) + + // Create 2 tasks across 2 runs + taskStore.uuid = util.NewFakeUUIDGeneratorOrFatal(taskID1, nil) + t1, err := taskStore.CreateTask(&model.Task{ + Namespace: "ns1", + RunUUID: runID1, + Name: "t1", + Pods: createTaskPodsAsJSONSlice(createTaskPod("p1", "uid1", apiv2beta1.PipelineTaskDetail_EXECUTOR)), + Fingerprint: "fp-1", + State: 1, + StateHistory: model.JSONSlice{}, + InputParameters: model.JSONSlice{}, + OutputParameters: model.JSONSlice{}, + Type: 0, + TypeAttrs: map[string]interface{}{}, + }) + assert.NoError(t, err) + taskStore.uuid = util.NewFakeUUIDGeneratorOrFatal(taskID2, nil) + t2, err := taskStore.CreateTask(&model.Task{ + Namespace: "ns2", + RunUUID: runID2, + Name: "t2", + Pods: createTaskPodsAsJSONSlice(createTaskPod("p2", "uid1", apiv2beta1.PipelineTaskDetail_EXECUTOR)), + Fingerprint: "fp-2", + State: 1, + StateHistory: model.JSONSlice{}, + InputParameters: model.JSONSlice{}, + OutputParameters: model.JSONSlice{}, + Type: 0, + TypeAttrs: map[string]interface{}{}, + }) + assert.NoError(t, err) + + // Create links: art1<->t1 (INPUT), art2<->t1 (OUTPUT), art2<->t2 (INPUT) + linkStore.uuid = util.NewFakeUUIDGeneratorOrFatal(linkUUID1, nil) + _, err = linkStore.CreateArtifactTask(&model.ArtifactTask{ + ArtifactID: art1.UUID, + TaskID: t1.UUID, + RunUUID: runID1, + Type: model.IOType(apiv2beta1.IOType_COMPONENT_INPUT), + ArtifactKey: "input1", + }) + assert.NoError(t, err) + linkStore.uuid = util.NewFakeUUIDGeneratorOrFatal(linkUUID2, nil) + _, err = linkStore.CreateArtifactTask(&model.ArtifactTask{ + ArtifactID: art2.UUID, + TaskID: t1.UUID, + RunUUID: runID1, + Type: model.IOType(apiv2beta1.IOType_OUTPUT), + ArtifactKey: "output1", + }) + assert.NoError(t, err) + // another link with a fresh random UUID + linkStore.uuid = util.NewUUIDGenerator() + _, err = linkStore.CreateArtifactTask(&model.ArtifactTask{ + ArtifactID: art2.UUID, + TaskID: t2.UUID, + RunUUID: runID2, + Type: model.IOType(apiv2beta1.IOType_COMPONENT_INPUT), + ArtifactKey: "input2", + }) + assert.NoError(t, err) + + opts, _ := list.NewOptions(&model.ArtifactTask{}, 20, "", nil) + + // List all + all, total, npt, err := linkStore.ListArtifactTasks(nil, nil, opts) + assert.NoError(t, err) + assert.Equal(t, 3, len(all)) + assert.Equal(t, 3, 
total) + assert.Equal(t, "", npt) + + // Filter by task t1 + byTask, totalTask, _, err := linkStore.ListArtifactTasks([]*model.FilterContext{{ReferenceKey: &model.ReferenceKey{Type: model.TaskResourceType, ID: t1.UUID}}}, nil, opts) + assert.NoError(t, err) + assert.Equal(t, 2, len(byTask)) + assert.Equal(t, 2, totalTask) + + // Filter by artifact art2 + byArtifact, totalArt, _, err := linkStore.ListArtifactTasks([]*model.FilterContext{{ReferenceKey: &model.ReferenceKey{Type: model.ArtifactResourceType, ID: art2.UUID}}}, nil, opts) + assert.NoError(t, err) + assert.Equal(t, 2, len(byArtifact)) // art2 is linked twice + assert.Equal(t, 2, totalArt) + + // Filter by run runID2 (should return only links for tasks in run-2) + byRun, totalRun, _, err := linkStore.ListArtifactTasks([]*model.FilterContext{{ReferenceKey: &model.ReferenceKey{Type: model.RunResourceType, ID: runID2}}}, nil, opts) + assert.NoError(t, err) + assert.Equal(t, 1, len(byRun)) + assert.Equal(t, 1, totalRun) + assert.Equal(t, art2.UUID, byRun[0].ArtifactID) + assert.Equal(t, t2.UUID, byRun[0].TaskID) + assert.Equal(t, model.IOType(apiv2beta1.IOType_COMPONENT_INPUT), byRun[0].Type) +} + +func TestListArtifactsForTask_UsingArtifactTasks(t *testing.T) { + db, artifactStore, taskStore, _, linkStore := initializeArtifactTaskDeps() + defer db.Close() + + // Seed artifacts and a single task + artifactStore.uuid = util.NewFakeUUIDGeneratorOrFatal(artifactID1, nil) + art1, err := artifactStore.CreateArtifact(&model.Artifact{ + Namespace: "ns1", + Type: 1, + URI: strPTR("u1"), + Name: "a1", + Metadata: map[string]interface{}{}, + }) + assert.NoError(t, err) + artifactStore.uuid = util.NewFakeUUIDGeneratorOrFatal(artifactID2, nil) + art2, err := artifactStore.CreateArtifact(&model.Artifact{ + Namespace: "ns1", + Type: 1, + URI: strPTR("u2"), + Name: "a2", + Metadata: map[string]interface{}{}, + }) + assert.NoError(t, err) + + taskStore.uuid = util.NewFakeUUIDGeneratorOrFatal(taskID1, nil) + t1, err := taskStore.CreateTask(&model.Task{ + Namespace: "ns1", + RunUUID: runID1, + Name: "t1", + Pods: createTaskPodsAsJSONSlice(createTaskPod("p1", "uid1", apiv2beta1.PipelineTaskDetail_EXECUTOR)), + Fingerprint: "fp-1", + State: 1, + StateHistory: model.JSONSlice{}, + InputParameters: model.JSONSlice{}, + OutputParameters: model.JSONSlice{}, + Type: 0, + TypeAttrs: map[string]interface{}{}, + }) + assert.NoError(t, err) + + // Link both artifacts to t1 + linkStore.uuid = util.NewFakeUUIDGeneratorOrFatal(linkUUID1, nil) + _, err = linkStore.CreateArtifactTask(&model.ArtifactTask{ + ArtifactID: art1.UUID, + TaskID: t1.UUID, + RunUUID: runID1, + Type: model.IOType(apiv2beta1.IOType_COMPONENT_INPUT), + ArtifactKey: "input1", + }) + assert.NoError(t, err) + linkStore.uuid = util.NewFakeUUIDGeneratorOrFatal(linkUUID2, nil) + _, err = linkStore.CreateArtifactTask(&model.ArtifactTask{ + ArtifactID: art2.UUID, + TaskID: t1.UUID, + RunUUID: runID1, + Type: model.IOType(apiv2beta1.IOType_OUTPUT), + ArtifactKey: "output1", + }) + assert.NoError(t, err) + + // Use artifactTasks to list artifacts for task t1 + opts, _ := list.NewOptions(&model.ArtifactTask{}, 20, "", nil) + rows, total, _, err := linkStore.ListArtifactTasks([]*model.FilterContext{{ReferenceKey: &model.ReferenceKey{Type: model.TaskResourceType, ID: t1.UUID}}}, nil, opts) + assert.NoError(t, err) + assert.Equal(t, 2, total) + + // Collect artifact IDs and verify set equals {art1, art2} + ids := map[string]bool{} + for _, r := range rows { + ids[r.ArtifactID] = true + } + assert.True(t, 
ids[art1.UUID]) + assert.True(t, ids[art2.UUID]) + assert.Equal(t, 2, len(ids)) +} + +func TestListArtifactTasks_Pagination_PageSizeAndNextPageToken(t *testing.T) { + db, artifactStore, taskStore, _, linkStore := initializeArtifactTaskDeps() + defer db.Close() + + // Seed artifacts and a task + artifactStore.uuid = util.NewFakeUUIDGeneratorOrFatal(artifactID1, nil) + art1, _ := artifactStore.CreateArtifact(&model.Artifact{ + Namespace: "ns1", + Type: 1, + URI: strPTR("u1"), + Name: "a1", + Metadata: map[string]interface{}{}, + }) + + artifactStore.uuid = util.NewFakeUUIDGeneratorOrFatal(artifactID2, nil) + art2, _ := artifactStore.CreateArtifact(&model.Artifact{ + Namespace: "ns1", + Type: 1, + URI: strPTR("u2"), + Name: "a2", + Metadata: map[string]interface{}{}, + }) + + taskStore.uuid = util.NewFakeUUIDGeneratorOrFatal(taskID1, nil) + t1, _ := taskStore.CreateTask(&model.Task{ + Namespace: "ns1", + RunUUID: runID1, + Name: "t1", + Pods: createTaskPodsAsJSONSlice(createTaskPod("p1", "uid1", apiv2beta1.PipelineTaskDetail_EXECUTOR)), + Fingerprint: "fp-1", + State: 1, + StateHistory: model.JSONSlice{}, + InputParameters: model.JSONSlice{}, + OutputParameters: model.JSONSlice{}, + Type: 0, + TypeAttrs: map[string]interface{}{}, + }) + + // Create 3 links with deterministic UUID order + linkStore.uuid = util.NewFakeUUIDGeneratorOrFatal(linkUUID1, nil) + _, _ = linkStore.CreateArtifactTask(&model.ArtifactTask{ + ArtifactID: art1.UUID, + TaskID: t1.UUID, + RunUUID: runID1, + Type: model.IOType(apiv2beta1.IOType_COMPONENT_INPUT), + ArtifactKey: "input1", + }) + + linkStore.uuid = util.NewFakeUUIDGeneratorOrFatal(linkUUID2, nil) + _, _ = linkStore.CreateArtifactTask(&model.ArtifactTask{ + ArtifactID: art1.UUID, + TaskID: t1.UUID, + RunUUID: runID1, + Type: model.IOType(apiv2beta1.IOType_OUTPUT), + ArtifactKey: "output1", + }) + + linkStore.uuid = util.NewFakeUUIDGeneratorOrFatal(linkUUID3, nil) + _, _ = linkStore.CreateArtifactTask(&model.ArtifactTask{ + ArtifactID: art2.UUID, + TaskID: t1.UUID, + RunUUID: runID1, + Type: model.IOType(apiv2beta1.IOType_COMPONENT_INPUT), + ArtifactKey: "input2", + }) + + // Page 1: size 2 + opts1, _ := list.NewOptions(&model.ArtifactTask{}, 2, "", nil) + page1, total, token1, err := linkStore.ListArtifactTasks(nil, nil, opts1) + assert.NoError(t, err) + assert.Equal(t, 2, len(page1)) + assert.Equal(t, 3, total) + assert.NotEmpty(t, token1) + // should be ordered by UUID asc by default + assert.Equal(t, linkUUID1, page1[0].UUID) + assert.Equal(t, linkUUID2, page1[1].UUID) + + // Page 2: use token + opts2, err := list.NewOptionsFromToken(token1, 2) + assert.NoError(t, err) + page2, total2, token2, err := linkStore.ListArtifactTasks(nil, nil, opts2) + assert.NoError(t, err) + assert.Equal(t, 1, len(page2)) + assert.Equal(t, 3, total2) + assert.Equal(t, "", token2) + assert.Equal(t, linkUUID3, page2[0].UUID) +} + +func TestListArtifactTasks_Pagination_WithFilter(t *testing.T) { + db, artifactStore, taskStore, _, linkStore := initializeArtifactTaskDeps() + defer db.Close() + + // Seed artifacts and tasks + artifactStore.uuid = util.NewFakeUUIDGeneratorOrFatal(artifactID1, nil) + art1, _ := artifactStore.CreateArtifact(&model.Artifact{ + Namespace: "ns1", + Type: 1, + URI: strPTR("u1"), + Name: "a1", + Metadata: map[string]interface{}{}, + }) + + artifactStore.uuid = util.NewFakeUUIDGeneratorOrFatal(artifactID2, nil) + art2, _ := artifactStore.CreateArtifact(&model.Artifact{ + Namespace: "ns1", + Type: 1, + URI: strPTR("u2"), + Name: "a2", + Metadata: 
map[string]interface{}{}, + }) + + taskStore.uuid = util.NewFakeUUIDGeneratorOrFatal(taskID1, nil) + t1, _ := taskStore.CreateTask(&model.Task{ + Namespace: "ns1", + RunUUID: runID1, + Name: "t1", + Pods: createTaskPodsAsJSONSlice(createTaskPod("p1", "uid1", apiv2beta1.PipelineTaskDetail_EXECUTOR)), + Fingerprint: "fp-1", + State: 1, + StateHistory: model.JSONSlice{}, + InputParameters: model.JSONSlice{}, + OutputParameters: model.JSONSlice{}, + Type: 0, + TypeAttrs: map[string]interface{}{}, + }) + + taskStore.uuid = util.NewFakeUUIDGeneratorOrFatal(taskID2, nil) + t2, _ := taskStore.CreateTask(&model.Task{ + Namespace: "ns2", + RunUUID: runID2, + Name: "t2", + Pods: createTaskPodsAsJSONSlice(createTaskPod("p1", "uid1", apiv2beta1.PipelineTaskDetail_EXECUTOR)), + Fingerprint: "fp-2", + State: 1, + StateHistory: model.JSONSlice{}, + InputParameters: model.JSONSlice{}, + OutputParameters: model.JSONSlice{}, + Type: 0, + TypeAttrs: map[string]interface{}{}, + }) + + // Links: 2 for t1, 1 for t2 + linkStore.uuid = util.NewFakeUUIDGeneratorOrFatal(linkUUID1, nil) + _, _ = linkStore.CreateArtifactTask(&model.ArtifactTask{ + ArtifactID: art1.UUID, + TaskID: t1.UUID, + RunUUID: runID1, + Type: model.IOType(apiv2beta1.IOType_COMPONENT_INPUT), + ArtifactKey: "input1", + }) + + linkStore.uuid = util.NewFakeUUIDGeneratorOrFatal(linkUUID2, nil) + _, _ = linkStore.CreateArtifactTask(&model.ArtifactTask{ + ArtifactID: art2.UUID, + TaskID: t1.UUID, + RunUUID: runID1, + Type: model.IOType(apiv2beta1.IOType_OUTPUT), + ArtifactKey: "output1", + }) + + linkStore.uuid = util.NewFakeUUIDGeneratorOrFatal(linkUUID3, nil) + _, _ = linkStore.CreateArtifactTask(&model.ArtifactTask{ + ArtifactID: art2.UUID, + TaskID: t2.UUID, + RunUUID: runID2, + Type: model.IOType(apiv2beta1.IOType_COMPONENT_INPUT), + ArtifactKey: "input2", + }) + + filterByT1 := []*model.FilterContext{{ReferenceKey: &model.ReferenceKey{Type: model.TaskResourceType, ID: t1.UUID}}} + + // Page size 1 for filtered list + opts1, _ := list.NewOptions(&model.ArtifactTask{}, 1, "", nil) + p1, total1, tok1, err := linkStore.ListArtifactTasks(filterByT1, nil, opts1) + assert.NoError(t, err) + assert.Equal(t, 1, len(p1)) + assert.Equal(t, 2, total1) + assert.NotEmpty(t, tok1) + + // Second page + opts2, err := list.NewOptionsFromToken(tok1, 1) + assert.NoError(t, err) + p2, total2, tok2, err := linkStore.ListArtifactTasks(filterByT1, nil, opts2) + assert.NoError(t, err) + assert.Equal(t, 1, len(p2)) + assert.Equal(t, 2, total2) + assert.Equal(t, "", tok2) + + // Ensure the two pages are disjoint and together contain the two t1 links + ids := map[string]bool{p1[0].UUID: true} + assert.False(t, ids[p2[0].UUID]) + ids[p2[0].UUID] = true + assert.Len(t, ids, 2) +} diff --git a/backend/src/apiserver/storage/db_fake.go b/backend/src/apiserver/storage/db_fake.go index 6596ed5f75d..56d798be939 100644 --- a/backend/src/apiserver/storage/db_fake.go +++ b/backend/src/apiserver/storage/db_fake.go @@ -30,14 +30,16 @@ func NewFakeDB() (*DB, error) { } // Create tables if err := dbInstance.AutoMigrate( + &model.Artifact{}, &model.Experiment{}, &model.Job{}, &model.Pipeline{}, &model.PipelineVersion{}, &model.ResourceReference{}, &model.Run{}, - &model.RunMetric{}, + &model.RunMetricV1{}, &model.Task{}, + &model.ArtifactTask{}, &model.DBStatus{}, &model.DefaultExperiment{}, ); err != nil { diff --git a/backend/src/apiserver/storage/experiment_store_test.go b/backend/src/apiserver/storage/experiment_store_test.go index 5b32943c3c8..01bd3c8e934 100644 --- 
a/backend/src/apiserver/storage/experiment_store_test.go +++ b/backend/src/apiserver/storage/experiment_store_test.go @@ -26,6 +26,7 @@ import ( "github.com/kubeflow/pipelines/backend/src/common/util" "github.com/pkg/errors" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" "google.golang.org/grpc/codes" ) @@ -505,12 +506,14 @@ func TestArchiveAndUnarchiveExperiment(t *testing.T) { }, PipelineSpec: model.PipelineSpec{}, } - runStore.CreateRun(run1) - runStore.CreateRun(run2) + _, err := runStore.CreateRun(run1) + require.NoError(t, err) + _, err = runStore.CreateRun(run2) + require.NoError(t, err) opts, err := list.NewOptions(&model.Run{}, 10, "id", nil) - runs, total_run_size, _, err := runStore.ListRuns(&model.FilterContext{ReferenceKey: &model.ReferenceKey{Type: model.ExperimentResourceType, ID: fakeID}}, opts) + runs, totalRunSize, _, err := runStore.ListRuns(&model.FilterContext{ReferenceKey: &model.ReferenceKey{Type: model.ExperimentResourceType, ID: fakeID}}, opts, false) assert.Nil(t, err) - assert.Equal(t, 2, total_run_size) + assert.Equal(t, 2, totalRunSize) assert.Equal(t, apiv1beta1.Run_STORAGESTATE_AVAILABLE.String(), string(runs[0].StorageState.ToV1())) assert.Equal(t, apiv1beta1.Run_STORAGESTATE_ARCHIVED.String(), string(runs[1].StorageState.ToV1())) assert.Equal(t, apiv2beta1.Run_AVAILABLE.String(), runs[0].StorageState.ToString()) @@ -564,9 +567,9 @@ func TestArchiveAndUnarchiveExperiment(t *testing.T) { assert.Nil(t, err) assert.Equal(t, "ARCHIVED", exp.StorageState.ToString()) opts, err = list.NewOptions(&model.Run{}, 10, "id", nil) - runs, total_run_size, _, err = runStore.ListRuns(&model.FilterContext{ReferenceKey: &model.ReferenceKey{Type: model.ExperimentResourceType, ID: fakeID}}, opts) + runs, totalRunSize, _, err = runStore.ListRuns(&model.FilterContext{ReferenceKey: &model.ReferenceKey{Type: model.ExperimentResourceType, ID: fakeID}}, opts, false) assert.Nil(t, err) - assert.Equal(t, 2, total_run_size) + assert.Equal(t, 2, totalRunSize) assert.Equal(t, apiv1beta1.Run_STORAGESTATE_ARCHIVED.String(), string(runs[0].StorageState.ToV1())) assert.Equal(t, apiv1beta1.Run_STORAGESTATE_ARCHIVED.String(), string(runs[1].StorageState.ToV1())) assert.Equal(t, apiv2beta1.Run_ARCHIVED.String(), runs[0].StorageState.ToString()) @@ -583,9 +586,9 @@ func TestArchiveAndUnarchiveExperiment(t *testing.T) { exp, err = experimentStore.GetExperiment(fakeID) assert.Nil(t, err) assert.Equal(t, "AVAILABLE", exp.StorageState.ToString()) - runs, total_run_size, _, err = runStore.ListRuns(&model.FilterContext{ReferenceKey: &model.ReferenceKey{Type: model.ExperimentResourceType, ID: fakeID}}, opts) + runs, totalRunSize, _, err = runStore.ListRuns(&model.FilterContext{ReferenceKey: &model.ReferenceKey{Type: model.ExperimentResourceType, ID: fakeID}}, opts, false) assert.Nil(t, err) - assert.Equal(t, total_run_size, 2) + assert.Equal(t, totalRunSize, 2) assert.Equal(t, apiv1beta1.Run_STORAGESTATE_ARCHIVED.String(), string(runs[0].StorageState.ToV1())) assert.Equal(t, apiv1beta1.Run_STORAGESTATE_ARCHIVED.String(), string(runs[1].StorageState.ToV1())) assert.Equal(t, apiv2beta1.Run_ARCHIVED.String(), runs[0].StorageState.ToString()) diff --git a/backend/src/apiserver/storage/job_store.go b/backend/src/apiserver/storage/job_store.go index 55f0b4ed812..ac2a84cc376 100644 --- a/backend/src/apiserver/storage/job_store.go +++ b/backend/src/apiserver/storage/job_store.go @@ -220,7 +220,7 @@ func (s *JobStore) addResourceReferences(filteredSelectBuilder sq.SelectBuilder) 
return sq. Select("jobs.*", resourceRefConcatQuery+" AS refs"). FromSelect(filteredSelectBuilder, "jobs"). - // Append all the resource references for the run as a json column + // append all the resource references for the run as a json column LeftJoin("(select * from resource_references where ResourceType='Job') AS r ON jobs.UUID=r.ResourceUUID"). GroupBy("jobs.UUID") } diff --git a/backend/src/apiserver/storage/pipeline_store_kubernetes.go b/backend/src/apiserver/storage/pipeline_store_kubernetes.go index 7059130fb33..b025f33f848 100644 --- a/backend/src/apiserver/storage/pipeline_store_kubernetes.go +++ b/backend/src/apiserver/storage/pipeline_store_kubernetes.go @@ -66,8 +66,8 @@ func (k *PipelineStoreKubernetes) ListPipelines(filterContext *model.FilterConte listOptions := []ctrlclient.ListOption{ctrlclient.UnsafeDisableDeepCopy} - if filterContext.ReferenceKey != nil && filterContext.ReferenceKey.Type == model.NamespaceResourceType { - listOptions = append(listOptions, ctrlclient.InNamespace(filterContext.ReferenceKey.ID)) + if filterContext.ReferenceKey != nil && filterContext.Type == model.NamespaceResourceType { + listOptions = append(listOptions, ctrlclient.InNamespace(filterContext.ID)) } // Be careful, the deep copy is disabled here to reduce memory allocations @@ -306,7 +306,7 @@ func (k *PipelineStoreKubernetes) GetLatestPipelineVersion(pipelineId string) (* var latestK8sPipelineVersion *v2beta1.PipelineVersion for _, k8sPipelineVersion := range k8sPipelineVersions.Items { - if latestK8sPipelineVersion == nil || k8sPipelineVersion.CreationTimestamp.Time.After(latestK8sPipelineVersion.CreationTimestamp.Time) { + if latestK8sPipelineVersion == nil || k8sPipelineVersion.CreationTimestamp.After(latestK8sPipelineVersion.CreationTimestamp.Time) { latestK8sPipelineVersion = &k8sPipelineVersion } } diff --git a/backend/src/apiserver/storage/pipeline_store_kubernetes_test.go b/backend/src/apiserver/storage/pipeline_store_kubernetes_test.go index 18a5a239dc2..aa336dedb78 100644 --- a/backend/src/apiserver/storage/pipeline_store_kubernetes_test.go +++ b/backend/src/apiserver/storage/pipeline_store_kubernetes_test.go @@ -152,6 +152,7 @@ func TestListK8sPipelines_Pagination_Descend(t *testing.T) { require.Equalf(t, pageSize, 3, "List size should not be zero") options, err1 = list.NewOptionsFromToken(npt, 1) + require.NoError(t, err1) pipelines, _, _, err3 := store.ListPipelines(&model.FilterContext{}, options) require.Nil(t, err3, "Failed to list pipelines: %v") require.Equalf(t, pipelines[0].Name, "Test Pipeline 3", "Pagination failed") @@ -189,6 +190,7 @@ func TestListK8sPipelinesV1_Pagination_NameAsc(t *testing.T) { require.Equalf(t, pageSize, 3, "List size should not be zero") options, err1 = list.NewOptionsFromToken(npt, 1) + require.NoError(t, err1) pipelines, _, _, err3 := store.ListPipelines(&model.FilterContext{}, options) require.Nil(t, err3, "Failed to list pipelines: %v") require.Equalf(t, pipelines[0].Name, "Test Pipeline 1", "Pagination failed") diff --git a/backend/src/apiserver/storage/run_store.go b/backend/src/apiserver/storage/run_store.go index 781c29888c3..e421c32d2c3 100644 --- a/backend/src/apiserver/storage/run_store.go +++ b/backend/src/apiserver/storage/run_store.go @@ -58,56 +58,53 @@ var runColumns = []string{ "PipelineRunContextId", } -var runMetricsColumns = []string{ - "RunUUID", - "NodeID", - "Name", - "NumberValue", - "Format", - "Payload", -} - type RunStoreInterface interface { - // Creates a run entry. Does not create children tasks. 
+	// CreateRun creates a run entry. Does not create children tasks.
 	CreateRun(run *model.Run) (*model.Run, error)
 
-	// Fetches a run.
-	GetRun(runId string) (*model.Run, error)
+	// GetRun fetches a run.
+	// If hydrateTasks is true, full task details are loaded (expensive operation).
+	// If hydrateTasks is false, only task count is populated (lightweight operation).
+	GetRun(runID string, hydrateTasks bool) (*model.Run, error)
 
-	// Fetches runs with specified options. Joins with children tasks.
-	ListRuns(filterContext *model.FilterContext, opts *list.Options) ([]*model.Run, int, string, error)
+	// ListRuns fetches runs with specified options.
+	// If hydrateTasks is true, full task details are loaded (expensive operation).
+	// If hydrateTasks is false, only task counts are populated (lightweight operation).
+	ListRuns(filterContext *model.FilterContext, opts *list.Options, hydrateTasks bool) ([]*model.Run, int, string, error)
 
-	// Updates a run.
+	// UpdateRun updates a run.
 	// Note: only state, runtime manifest can be updated. Does not update dependent tasks.
 	UpdateRun(run *model.Run) (err error)
 
-	// Archives a run.
+	// ArchiveRun archives a run.
 	ArchiveRun(runId string) error
 
-	// Un-archives a run.
+	// UnarchiveRun un-archives a run.
 	UnarchiveRun(runId string) error
 
-	// Deletes a run.
+	// DeleteRun deletes a run.
 	DeleteRun(runId string) error
 
-	// Creates a new metric entry.
-	CreateMetric(metric *model.RunMetric) (err error)
+	// CreateV1Metric creates a new metric entry.
+	// Deprecated: use CreateMetric instead.
+	CreateV1Metric(metric *model.RunMetricV1) (err error)
 
-	// Terminates a run.
+	// TerminateRun terminates a run.
 	TerminateRun(runId string) error
 }
 
 type RunStore struct {
 	db                     *DB
 	resourceReferenceStore *ResourceReferenceStore
+	taskStore              *TaskStore
 	time                   util.TimeInterface
 }
 
-// Runs two SQL queries in a transaction to return a list of matching runs, as well as their
+// ListRuns runs two SQL queries in a transaction to return a list of matching runs, as well as their
 // total_size. The total_size does not reflect the page size, but it does reflect the number of runs
 // matching the supplied filters and resource references.
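+//
+// As an illustrative sketch (hypothetical caller, not part of this change),
+// paging through an experiment's runs with lightweight task counts only:
+//
+//	opts, _ := list.NewOptions(&model.Run{}, 10, "id", nil)
+//	runs, totalSize, npt, err := runStore.ListRuns(
+//		&model.FilterContext{ReferenceKey: &model.ReferenceKey{Type: model.ExperimentResourceType, ID: expID}},
+//		opts, false) // hydrateTasks=false populates only TaskCount
+//
+// Here expID is a placeholder experiment UUID.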
func (s *RunStore) ListRuns( - filterContext *model.FilterContext, opts *list.Options, + filterContext *model.FilterContext, opts *list.Options, hydrateTasks bool, ) ([]*model.Run, int, string, error) { errorF := func(err error) ([]*model.Run, int, string, error) { return nil, 0, "", util.NewInternalServerError(err, "Failed to list runs: %v", err) @@ -166,12 +163,35 @@ func (s *RunStore) ListRuns( return errorF(err) } + // Either hydrate full task details or just populate task counts for the runs we return on this page if len(runs) <= opts.PageSize { + if hydrateTasks { + if err := s.hydrateTasksForRuns(runs); err != nil { + return errorF(err) + } + } else { + if err := s.populateTaskCountsForRuns(runs); err != nil { + return errorF(err) + } + } return runs, total_size, "", nil } npt, err := opts.NextPageToken(runs[opts.PageSize]) - return runs[:opts.PageSize], total_size, npt, err + if err != nil { + return errorF(err) + } + page := runs[:opts.PageSize] + if hydrateTasks { + if err := s.hydrateTasksForRuns(page); err != nil { + return errorF(err) + } + } else { + if err := s.populateTaskCountsForRuns(page); err != nil { + return errorF(err) + } + } + return page, total_size, npt, nil } func (s *RunStore) buildSelectRunsQuery(selectCount bool, opts *list.Options, @@ -215,11 +235,13 @@ func (s *RunStore) buildSelectRunsQuery(selectCount bool, opts *list.Options, } // GetRun Get the run manifest from Workflow CRD. -func (s *RunStore) GetRun(runId string) (*model.Run, error) { +// If hydrateTasks is true, full task details are loaded (expensive operation). +// If hydrateTasks is false, only task count is populated (lightweight operation). +func (s *RunStore) GetRun(runID string, hydrateTasks bool) (*model.Run, error) { sql, args, err := s.addMetricsResourceReferencesAndTasks( sq.Select(runColumns...). From("run_details"). - Where(sq.Eq{"UUID": runId}). + Where(sq.Eq{"UUID": runID}). Limit(1), nil). ToSql() if err != nil { @@ -236,15 +258,115 @@ func (s *RunStore) GetRun(runId string) (*model.Run, error) { return nil, util.NewInternalServerError(err, "Failed to get run: %v", err.Error()) } if len(runs) == 0 { - return nil, util.NewResourceNotFoundError("Run", fmt.Sprint(runId)) + return nil, util.NewResourceNotFoundError("Run", fmt.Sprint(runID)) } if string(runs[0].WorkflowRuntimeManifest) == "" && string(runs[0].WorkflowSpecManifest) != "" { // This can only happen when workflow reporting is failed. - return nil, util.NewResourceNotFoundError("Failed to get run: %s", runId) + return nil, util.NewResourceNotFoundError("Failed to get run: %s", runID) + } + + // Either hydrate full task details or just populate task count + if hydrateTasks { + if err := s.hydrateTasksForRuns(runs); err != nil { + return nil, util.NewInternalServerError(err, "Failed to get run tasks: %v", err) + } + } else { + if err := s.populateTaskCountsForRuns(runs); err != nil { + return nil, util.NewInternalServerError(err, "Failed to get run task counts: %v", err) + } } return runs[0], nil } +// hydrateTasksForRuns fetches tasks for the provided runs and assigns them to the Run model. +// It issues queries using WHERE RunUUID IN (...) and groups results by RunUUID. +// It also maps artifacts to tasks using artifact_tasks joined with artifacts. 
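+// Roughly, the statements issued are (a sketch of the squirrel-built SQL below;
+// column lists elided):
+//
+//	SELECT <taskColumns> FROM tasks WHERE RunUUID IN (?, ...)
+//	SELECT <link and artifact columns> FROM artifact_tasks
+//	    JOIN artifacts ON artifact_tasks.ArtifactID = artifacts.UUID
+//	    WHERE artifact_tasks.TaskID IN (?, ...)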
+func (s *RunStore) hydrateTasksForRuns(runs []*model.Run) error { + if len(runs) == 0 { + return nil + } + ids := make([]string, 0, len(runs)) + index := make(map[string]*model.Run, len(runs)) + for _, r := range runs { + if r == nil || r.UUID == "" { + continue + } + if _, ok := index[r.UUID]; !ok { + index[r.UUID] = r + ids = append(ids, r.UUID) + } + } + + // Select only needed columns from tasks; scan and attach in Go. + sqlQuery, args, err := sq. + Select(taskColumns...). + From("tasks"). + Where(sq.Eq{"RunUUID": ids}). + ToSql() + if err != nil { + return err + } + + rows, err := s.db.Query(sqlQuery, args...) + if err != nil { + return err + } + defer rows.Close() + + // Map tasks by ID for later artifact hydration + taskByID := make(map[string]*model.Task) + for rows.Next() { + task, err := scanTaskRow(rows) + if err != nil { + return err + } + taskByID[task.UUID] = task + if run, ok := index[task.RunUUID]; ok { + if run.Tasks == nil { + run.Tasks = []*model.Task{} + } + run.Tasks = append(run.Tasks, task) + } + } + if err := rows.Err(); err != nil { + return err + } + + if len(taskByID) == 0 { + return nil + } + + // Hydrate artifacts for these tasks using generalized helper + allTasks := make([]*model.Task, 0, len(taskByID)) + for _, t := range taskByID { + allTasks = append(allTasks, t) + } + return hydrateArtifactsForTasks(s.db, allTasks) +} + +// populateTaskCountsForRuns fetches task counts for the provided runs and assigns them to the Run model. +// This is a lightweight alternative to hydrateTasksForRuns that only populates the TaskCount field +// without performing expensive task hydration. +func (s *RunStore) populateTaskCountsForRuns(runs []*model.Run) error { + if len(runs) == 0 { + return nil + } + + for _, run := range runs { + if run == nil || run.UUID == "" { + continue + } + + count, err := s.taskStore.GetTaskCountForRun(run.UUID) + if err != nil { + return err + } + run.TaskCount = count + } + + return nil +} + // Applies a func f to every string in a given string slice. func apply(f func(string) string, vs []string) []string { vsm := make([]string, len(vs)) @@ -269,26 +391,11 @@ func (s *RunStore) addMetricsResourceReferencesAndTasks(filteredSelectBuilder sq LeftJoin("resource_references AS rr ON rr.ResourceType='Run' AND rd.UUID=rr.ResourceUUID"). GroupBy("rd.UUID") - tasksConcatQuery := s.db.Concat([]string{`"["`, s.db.GroupConcat("tasks.Payload", ","), `"]"`}, "") - columnsAfterJoiningTasks := append( - apply(func(column string) string { return "rdref." + column }, runColumns), - "rdref.refs", - tasksConcatQuery+" AS taskDetails") - if opts != nil && !r.IsRegularField(opts.SortByFieldName) { - columnsAfterJoiningTasks = append(columnsAfterJoiningTasks, "rdref."+opts.SortByFieldName) - } - subQ = sq. - Select(columnsAfterJoiningTasks...). - FromSelect(subQ, "rdref"). - LeftJoin("tasks AS tasks ON rdref.UUID=tasks.RunUUID"). - GroupBy("rdref.UUID") - - // TODO(jingzhang36): address the case where some runs don't have the metric used in order by. + // TODO(HumairAK): Remove this join on metrics when v1 is removed metricConcatQuery := s.db.Concat([]string{`"["`, s.db.GroupConcat("rm.Payload", ","), `"]"`}, "") columnsAfterJoiningRunMetrics := append( apply(func(column string) string { return "subq." + column }, runColumns), // Add prefix "subq." to runColumns "subq.refs", - "subq.taskDetails", metricConcatQuery+" AS metrics") return sq. Select(columnsAfterJoiningRunMetrics...). 
@@ -304,7 +411,7 @@ func (s *RunStore) scanRowsToRuns(rows *sql.Rows) ([]*model.Run, error) { pipelineName, pipelineSpecManifest, workflowSpecManifest, parameters, pipelineRuntimeManifest, workflowRuntimeManifest string var createdAtInSec, scheduledAtInSec, finishedAtInSec, pipelineContextId, pipelineRunContextId sql.NullInt64 - var metricsInString, resourceReferencesInString, tasksInString, runtimeParameters, pipelineRoot, jobId, state, stateHistory, pipelineVersionId sql.NullString + var metricsInString, resourceReferencesInString, runtimeParameters, pipelineRoot, jobID, state, stateHistory, pipelineVersionID sql.NullString err := rows.Scan( &uuid, &experimentUUID, @@ -319,7 +426,7 @@ func (s *RunStore) scanRowsToRuns(rows *sql.Rows) ([]*model.Run, error) { &finishedAtInSec, &conditions, &pipelineId, - &pipelineVersionId, + &pipelineVersionID, &pipelineName, &pipelineSpecManifest, &workflowSpecManifest, @@ -328,13 +435,12 @@ func (s *RunStore) scanRowsToRuns(rows *sql.Rows) ([]*model.Run, error) { &pipelineRoot, &pipelineRuntimeManifest, &workflowRuntimeManifest, - &jobId, + &jobID, &state, &stateHistory, &pipelineContextId, &pipelineRunContextId, &resourceReferencesInString, - &tasksInString, &metricsInString, ) if err != nil { @@ -346,19 +452,15 @@ func (s *RunStore) scanRowsToRuns(rows *sql.Rows) ([]*model.Run, error) { glog.Errorf("Failed to parse metrics (%v) from DB: %v", metricsInString, err) // Skip the error to allow user to get runs even when metrics data // are invalid. - metrics = []*model.RunMetric{} + metrics = []*model.RunMetricV1{} } resourceReferences, err := parseResourceReferences(resourceReferencesInString) if err != nil { // throw internal exception if failed to parse the resource reference. return nil, util.NewInternalServerError(err, "Failed to parse resource reference") } - tasks, err := parseTaskDetails(tasksInString) - if err != nil { - return nil, util.NewInternalServerError(err, "Failed to parse task details") - } - jId := jobId.String - pvId := pipelineVersionId.String + jID := jobID.String + pvID := pipelineVersionID.String if len(resourceReferences) > 0 { if experimentUUID == "" { experimentUUID = model.GetRefIdFromResourceReferences(resourceReferences, model.ExperimentResourceType) @@ -369,17 +471,20 @@ func (s *RunStore) scanRowsToRuns(rows *sql.Rows) ([]*model.Run, error) { if pipelineId == "" { pipelineId = model.GetRefIdFromResourceReferences(resourceReferences, model.PipelineResourceType) } - if pvId == "" { - pvId = model.GetRefIdFromResourceReferences(resourceReferences, model.PipelineVersionResourceType) + if pvID == "" { + pvID = model.GetRefIdFromResourceReferences(resourceReferences, model.PipelineVersionResourceType) } - if jId == "" { - jId = model.GetRefIdFromResourceReferences(resourceReferences, model.JobResourceType) + if jID == "" { + jID = model.GetRefIdFromResourceReferences(resourceReferences, model.JobResourceType) } } runtimeConfig := parseRuntimeConfig(runtimeParameters, pipelineRoot) var stateHistoryNew []*model.RuntimeStatus if stateHistory.Valid { - json.Unmarshal([]byte(stateHistory.String), &stateHistoryNew) + err := json.Unmarshal([]byte(stateHistory.String), &stateHistoryNew) + if err != nil { + return nil, err + } } run := &model.Run{ UUID: uuid, @@ -390,7 +495,7 @@ func (s *RunStore) scanRowsToRuns(rows *sql.Rows) ([]*model.Run, error) { Namespace: namespace, ServiceAccount: serviceAccount, Description: string(description), - RecurringRunId: jId, + RecurringRunId: jID, RunDetails: model.RunDetails{ CreatedAtInSec: 
createdAtInSec.Int64, ScheduledAtInSec: scheduledAtInSec.Int64, @@ -401,14 +506,13 @@ func (s *RunStore) scanRowsToRuns(rows *sql.Rows) ([]*model.Run, error) { WorkflowRuntimeManifest: model.LargeText(workflowRuntimeManifest), PipelineContextId: pipelineContextId.Int64, PipelineRunContextId: pipelineRunContextId.Int64, - TaskDetails: tasks, StateHistory: stateHistoryNew, }, Metrics: metrics, ResourceReferences: resourceReferences, PipelineSpec: model.PipelineSpec{ PipelineId: pipelineId, - PipelineVersionId: pvId, + PipelineVersionId: pvID, PipelineName: pipelineName, PipelineSpecManifest: model.LargeText(pipelineSpecManifest), WorkflowSpecManifest: model.LargeText(workflowSpecManifest), @@ -422,11 +526,11 @@ func (s *RunStore) scanRowsToRuns(rows *sql.Rows) ([]*model.Run, error) { return runs, nil } -func parseMetrics(metricsInString sql.NullString) ([]*model.RunMetric, error) { +func parseMetrics(metricsInString sql.NullString) ([]*model.RunMetricV1, error) { if !metricsInString.Valid { return nil, nil } - var metrics []*model.RunMetric + var metrics []*model.RunMetricV1 if err := json.Unmarshal([]byte(metricsInString.String), &metrics); err != nil { return nil, util.Wrapf(err, "Failed to parse a run metric '%s'", metricsInString.String) } @@ -455,17 +559,6 @@ func parseResourceReferences(resourceRefString sql.NullString) ([]*model.Resourc return refs, nil } -func parseTaskDetails(tasksInString sql.NullString) ([]*model.Task, error) { - if !tasksInString.Valid { - return nil, nil - } - var taskDetails []*model.Task - if err := json.Unmarshal([]byte(tasksInString.String), &taskDetails); err != nil { - return nil, util.Wrapf(err, "Failed to parse task details '%s'", tasksInString.String) - } - return taskDetails, nil -} - func (s *RunStore) CreateRun(r *model.Run) (*model.Run, error) { r = r.ToV1().ToV2() if r.StorageState == "" || r.StorageState == model.StorageStateUnspecified || r.StorageState == model.StorageStateUnspecifiedV1 { @@ -662,6 +755,12 @@ func (s *RunStore) DeleteRun(id string) error { if err != nil { return util.NewInternalServerError(err, "Failed to create a new transaction to delete run") } + // Delete tasks first to avoid foreign key constraint violations + err = s.taskStore.DeleteTasksForRun(tx, id) + if err != nil { + tx.Rollback() + return util.NewInternalServerError(err, "Failed to delete tasks for run %v", id) + } _, err = tx.Exec(runSql, runArgs...) if err != nil { tx.Rollback() @@ -680,8 +779,8 @@ func (s *RunStore) DeleteRun(id string) error { return nil } -// Creates a new metric in run_metrics table if does not exist. -func (s *RunStore) CreateMetric(metric *model.RunMetric) error { +// CreateV1Metric Creates a new metric in run_metrics table if does not exist. +func (s *RunStore) CreateV1Metric(metric *model.RunMetricV1) error { payloadBytes, err := json.Marshal(metric) if err != nil { return util.NewInternalServerError(err, @@ -712,11 +811,12 @@ func (s *RunStore) CreateMetric(metric *model.RunMetric) error { return nil } -// Returns a new RunStore. +// NewRunStore Returns a new RunStore. func NewRunStore(db *DB, time util.TimeInterface) *RunStore { return &RunStore{ db: db, resourceReferenceStore: NewResourceReferenceStore(db, nil), + taskStore: NewTaskStore(db, time, util.NewUUIDGenerator()), time: time, } } @@ -760,36 +860,3 @@ func (s *RunStore) addSortByRunMetricToSelect(sqlBuilder sq.SelectBuilder, opts FromSelect(sqlBuilder, "selected_runs"). 
LeftJoin("run_metrics ON selected_runs.uuid=run_metrics.runuuid AND run_metrics.name='" + opts.SortByFieldName + "'") } - -func (s *RunStore) scanRowsToRunMetrics(rows *sql.Rows) ([]*model.RunMetric, error) { - var metrics []*model.RunMetric - for rows.Next() { - var runId, nodeId, name, form, payload string - var val float64 - err := rows.Scan( - &runId, - &nodeId, - &name, - &val, - &form, - &payload, - ) - if err != nil { - glog.Errorf("Failed to scan row into a run metric: %v", err) - return metrics, nil - } - - metrics = append( - metrics, - &model.RunMetric{ - RunUUID: runId, - NodeID: nodeId, - Name: name, - NumberValue: val, - Format: form, - Payload: model.LargeText(payload), - }, - ) - } - return metrics, nil -} diff --git a/backend/src/apiserver/storage/run_store_test.go b/backend/src/apiserver/storage/run_store_test.go index fcd17f6cc90..a5f2ff81ea2 100644 --- a/backend/src/apiserver/storage/run_store_test.go +++ b/backend/src/apiserver/storage/run_store_test.go @@ -37,7 +37,7 @@ const ( defaultFakeRunIdThree = "123e4567-e89b-12d3-a456-426655440023" ) -type RunMetricSorter []*model.RunMetric +type RunMetricSorter []*model.RunMetricV1 func (r RunMetricSorter) Len() int { return len(r) } func (r RunMetricSorter) Less(i, j int) bool { return r[i].Name < r[j].Name } @@ -119,22 +119,22 @@ func initializeRunStore() (*DB, *RunStore) { runStore.CreateRun(run2) runStore.CreateRun(run3) - metric1 := &model.RunMetric{ + metric1 := &model.RunMetricV1{ RunUUID: "1", NodeID: "node1", Name: "dummymetric", NumberValue: 1.0, Format: "PERCENTAGE", } - metric2 := &model.RunMetric{ + metric2 := &model.RunMetricV1{ RunUUID: "2", NodeID: "node2", Name: "dummymetric", NumberValue: 2.0, Format: "PERCENTAGE", } - runStore.CreateMetric(metric1) - runStore.CreateMetric(metric2) + runStore.CreateV1Metric(metric1) + runStore.CreateV1Metric(metric2) return db, runStore } @@ -164,7 +164,7 @@ func TestListRuns_Pagination(t *testing.T) { }, }, }, - Metrics: []*model.RunMetric{ + Metrics: []*model.RunMetricV1{ { RunUUID: "1", NodeID: "node1", @@ -204,7 +204,7 @@ func TestListRuns_Pagination(t *testing.T) { }, }, }, - Metrics: []*model.RunMetric{ + Metrics: []*model.RunMetricV1{ { RunUUID: "2", NodeID: "node2", @@ -227,7 +227,7 @@ func TestListRuns_Pagination(t *testing.T) { assert.Nil(t, err) runs, total_size, nextPageToken, err := runStore.ListRuns( - &model.FilterContext{ReferenceKey: &model.ReferenceKey{Type: model.ExperimentResourceType, ID: defaultFakeExpId}}, opts) + &model.FilterContext{ReferenceKey: &model.ReferenceKey{Type: model.ExperimentResourceType, ID: defaultFakeExpId}}, opts, false) runs[0] = runs[0].ToV1() assert.Nil(t, err) assert.Equal(t, 2, total_size) @@ -237,7 +237,7 @@ func TestListRuns_Pagination(t *testing.T) { opts, err = list.NewOptionsFromToken(nextPageToken, 1) assert.Nil(t, err) runs, total_size, nextPageToken, err = runStore.ListRuns( - &model.FilterContext{ReferenceKey: &model.ReferenceKey{Type: model.ExperimentResourceType, ID: defaultFakeExpId}}, opts) + &model.FilterContext{ReferenceKey: &model.ReferenceKey{Type: model.ExperimentResourceType, ID: defaultFakeExpId}}, opts, false) runs[0] = runs[0].ToV1() assert.Nil(t, err) assert.Equal(t, 2, total_size) @@ -270,7 +270,7 @@ func TestListRuns_Pagination_WithSortingOnMetrics(t *testing.T) { }, }, }, - Metrics: []*model.RunMetric{ + Metrics: []*model.RunMetricV1{ { RunUUID: "1", NodeID: "node1", @@ -309,7 +309,7 @@ func TestListRuns_Pagination_WithSortingOnMetrics(t *testing.T) { }, }, }, - Metrics: []*model.RunMetric{ + Metrics: 
[]*model.RunMetricV1{ { RunUUID: "2", NodeID: "node2", @@ -333,7 +333,7 @@ func TestListRuns_Pagination_WithSortingOnMetrics(t *testing.T) { assert.Nil(t, err) runs, total_size, nextPageToken, err := runStore.ListRuns( - &model.FilterContext{ReferenceKey: &model.ReferenceKey{Type: model.ExperimentResourceType, ID: defaultFakeExpId}}, opts) + &model.FilterContext{ReferenceKey: &model.ReferenceKey{Type: model.ExperimentResourceType, ID: defaultFakeExpId}}, opts, false) runs[0] = runs[0].ToV1() assert.Nil(t, err) assert.Equal(t, 2, total_size) @@ -343,7 +343,7 @@ func TestListRuns_Pagination_WithSortingOnMetrics(t *testing.T) { opts, err = list.NewOptionsFromToken(nextPageToken, 1) assert.Nil(t, err) runs, total_size, nextPageToken, err = runStore.ListRuns( - &model.FilterContext{ReferenceKey: &model.ReferenceKey{Type: model.ExperimentResourceType, ID: defaultFakeExpId}}, opts) + &model.FilterContext{ReferenceKey: &model.ReferenceKey{Type: model.ExperimentResourceType, ID: defaultFakeExpId}}, opts, false) runs[0] = runs[0].ToV1() assert.Nil(t, err) assert.Equal(t, 2, total_size) @@ -355,7 +355,7 @@ func TestListRuns_Pagination_WithSortingOnMetrics(t *testing.T) { assert.Nil(t, err) runs, total_size, nextPageToken, err = runStore.ListRuns( - &model.FilterContext{ReferenceKey: &model.ReferenceKey{Type: model.ExperimentResourceType, ID: defaultFakeExpId}}, opts) + &model.FilterContext{ReferenceKey: &model.ReferenceKey{Type: model.ExperimentResourceType, ID: defaultFakeExpId}}, opts, false) runs[0] = runs[0].ToV1() assert.Nil(t, err) assert.Equal(t, 2, total_size) @@ -365,7 +365,7 @@ func TestListRuns_Pagination_WithSortingOnMetrics(t *testing.T) { opts, err = list.NewOptionsFromToken(nextPageToken, 1) assert.Nil(t, err) runs, total_size, nextPageToken, err = runStore.ListRuns( - &model.FilterContext{ReferenceKey: &model.ReferenceKey{Type: model.ExperimentResourceType, ID: defaultFakeExpId}}, opts) + &model.FilterContext{ReferenceKey: &model.ReferenceKey{Type: model.ExperimentResourceType, ID: defaultFakeExpId}}, opts, false) runs[0] = runs[0].ToV1() assert.Nil(t, err) assert.Equal(t, 2, total_size) @@ -380,10 +380,10 @@ func TestListRuns_TotalSizeWithNoFilter(t *testing.T) { opts, _ := list.NewOptions(&model.Run{}, 4, "", nil) // No filter - runs, total_size, _, err := runStore.ListRuns(&model.FilterContext{}, opts) + runs, totalSize, _, err := runStore.ListRuns(&model.FilterContext{}, opts, false) assert.Nil(t, err) assert.Equal(t, 3, len(runs)) - assert.Equal(t, 3, total_size) + assert.Equal(t, 3, totalSize) } func TestListRuns_TotalSizeWithFilter(t *testing.T) { @@ -406,10 +406,10 @@ func TestListRuns_TotalSizeWithFilter(t *testing.T) { } newFilter, _ := filter.New(filterProto) opts, _ := list.NewOptions(&model.Run{}, 4, "", newFilter) - runs, total_size, _, err := runStore.ListRuns(&model.FilterContext{}, opts) + runs, totalSize, _, err := runStore.ListRuns(&model.FilterContext{}, opts, false) assert.Nil(t, err) assert.Equal(t, 2, len(runs)) - assert.Equal(t, 2, total_size) + assert.Equal(t, 2, totalSize) } func TestListRuns_Pagination_Descend(t *testing.T) { @@ -437,7 +437,7 @@ func TestListRuns_Pagination_Descend(t *testing.T) { }, }, }, - Metrics: []*model.RunMetric{ + Metrics: []*model.RunMetricV1{ { RunUUID: "2", NodeID: "node2", @@ -476,7 +476,7 @@ func TestListRuns_Pagination_Descend(t *testing.T) { }, }, }, - Metrics: []*model.RunMetric{ + Metrics: []*model.RunMetricV1{ { RunUUID: "1", NodeID: "node1", @@ -498,7 +498,7 @@ func TestListRuns_Pagination_Descend(t *testing.T) { opts, err 
:= list.NewOptions(&model.Run{}, 1, "id desc", nil) assert.Nil(t, err) runs, total_size, nextPageToken, err := runStore.ListRuns( - &model.FilterContext{ReferenceKey: &model.ReferenceKey{Type: model.ExperimentResourceType, ID: defaultFakeExpId}}, opts) + &model.FilterContext{ReferenceKey: &model.ReferenceKey{Type: model.ExperimentResourceType, ID: defaultFakeExpId}}, opts, false) for i, run := range runs { runs[i] = run.ToV1() fmt.Printf("%+v\n", run) @@ -512,7 +512,7 @@ func TestListRuns_Pagination_Descend(t *testing.T) { opts, err = list.NewOptionsFromToken(nextPageToken, 1) assert.Nil(t, err) runs, total_size, nextPageToken, err = runStore.ListRuns( - &model.FilterContext{ReferenceKey: &model.ReferenceKey{Type: model.ExperimentResourceType, ID: defaultFakeExpId}}, opts) + &model.FilterContext{ReferenceKey: &model.ReferenceKey{Type: model.ExperimentResourceType, ID: defaultFakeExpId}}, opts, false) runs[0] = runs[0].ToV1() assert.Nil(t, err) assert.Equal(t, 2, total_size) @@ -546,7 +546,7 @@ func TestListRuns_Pagination_LessThanPageSize(t *testing.T) { }, }, }, - Metrics: []*model.RunMetric{ + Metrics: []*model.RunMetricV1{ { RunUUID: "1", NodeID: "node1", @@ -583,7 +583,7 @@ func TestListRuns_Pagination_LessThanPageSize(t *testing.T) { }, }, }, - Metrics: []*model.RunMetric{ + Metrics: []*model.RunMetricV1{ { RunUUID: "2", NodeID: "node2", @@ -606,7 +606,7 @@ func TestListRuns_Pagination_LessThanPageSize(t *testing.T) { opts, err := list.NewOptions(&model.Run{}, 10, "", nil) assert.Nil(t, err) runs, total_size, nextPageToken, err := runStore.ListRuns( - &model.FilterContext{ReferenceKey: &model.ReferenceKey{Type: model.ExperimentResourceType, ID: defaultFakeExpId}}, opts) + &model.FilterContext{ReferenceKey: &model.ReferenceKey{Type: model.ExperimentResourceType, ID: defaultFakeExpId}}, opts, false) runs[0] = runs[0].ToV1() runs[1] = runs[1].ToV1() @@ -622,7 +622,7 @@ func TestListRunsError(t *testing.T) { opts, err := list.NewOptions(&model.Run{}, 1, "", nil) _, _, _, err = runStore.ListRuns( - &model.FilterContext{ReferenceKey: &model.ReferenceKey{Type: model.ExperimentResourceType, ID: defaultFakeExpId}}, opts) + &model.FilterContext{ReferenceKey: &model.ReferenceKey{Type: model.ExperimentResourceType, ID: defaultFakeExpId}}, opts, false) assert.Equal(t, codes.Internal, err.(*util.UserError).ExternalStatusCode(), "Expected to throw an internal error") } @@ -651,7 +651,7 @@ func TestGetRun(t *testing.T) { }, }, }, - Metrics: []*model.RunMetric{ + Metrics: []*model.RunMetricV1{ { RunUUID: "1", NodeID: "node1", @@ -668,7 +668,7 @@ func TestGetRun(t *testing.T) { }, } - runDetail, err := runStore.GetRun("1") + runDetail, err := runStore.GetRun("1", false) assert.Nil(t, err) assert.Equal(t, expectedRun.ToV1(), runDetail.ToV1()) } @@ -677,7 +677,7 @@ func TestGetRun_NotFoundError(t *testing.T) { db, runStore := initializeRunStore() defer db.Close() - _, err := runStore.GetRun("notfound") + _, err := runStore.GetRun("notfound", false) assert.Equal(t, codes.NotFound, err.(*util.UserError).ExternalStatusCode(), "Expected not to find the run") } @@ -686,7 +686,7 @@ func TestGetRun_InternalError(t *testing.T) { db, runStore := initializeRunStore() db.Close() - _, err := runStore.GetRun("1") + _, err := runStore.GetRun("1", false) assert.Equal(t, codes.Internal, err.(*util.UserError).ExternalStatusCode(), "Expected get run to return internal error") } @@ -715,7 +715,7 @@ func TestCreateAndUpdateRun_UpdateSuccess(t *testing.T) { }, }, }, - Metrics: []*model.RunMetric{ + Metrics: 
[]*model.RunMetricV1{ { RunUUID: "1", NodeID: "node1", @@ -732,7 +732,7 @@ func TestCreateAndUpdateRun_UpdateSuccess(t *testing.T) { }, } - runDetail, err := runStore.GetRun("1") + runDetail, err := runStore.GetRun("1", false) assert.Nil(t, err) assert.Equal(t, expectedRun.ToV1(), runDetail.ToV1()) @@ -772,7 +772,7 @@ func TestCreateAndUpdateRun_UpdateSuccess(t *testing.T) { }, }, }, - Metrics: []*model.RunMetric{ + Metrics: []*model.RunMetricV1{ { RunUUID: "1", NodeID: "node1", @@ -789,7 +789,7 @@ func TestCreateAndUpdateRun_UpdateSuccess(t *testing.T) { }, } - runDetail, err = runStore.GetRun("1") + runDetail, err = runStore.GetRun("1", false) assert.Nil(t, err) assert.Equal(t, expectedRun.ToV1(), runDetail.ToV1()) } @@ -800,7 +800,7 @@ func TestCreateAndUpdateRun_CreateSuccess(t *testing.T) { expStore := NewExperimentStore(db, util.NewFakeTimeForEpoch(), util.NewFakeUUIDGeneratorOrFatal(defaultFakeExpId, nil)) expStore.CreateExperiment(&model.Experiment{Name: "exp1"}) // Checking that the run is not yet in the DB - _, err := runStore.GetRun("2000") + _, err := runStore.GetRun("2000", false) assert.NotNil(t, err) runDetail := &model.Run{ @@ -847,7 +847,7 @@ func TestCreateAndUpdateRun_CreateSuccess(t *testing.T) { StorageState: model.StorageStateAvailable, } - runDetail, err = runStore.GetRun("2000") + runDetail, err = runStore.GetRun("2000", false) assert.Nil(t, err) assert.Equal(t, expectedRun.ToV1(), runDetail.ToV1()) } @@ -911,7 +911,7 @@ func TestCreateOrUpdateRun_DuplicateUUID(t *testing.T) { WorkflowRuntimeManifest: "workflow1", State: model.RuntimeStateRunning, }, - Metrics: []*model.RunMetric{ + Metrics: []*model.RunMetricV1{ { RunUUID: "1", NodeID: "node1", @@ -964,7 +964,7 @@ func TestTerminateRun(t *testing.T) { }, }, }, - Metrics: []*model.RunMetric{ + Metrics: []*model.RunMetricV1{ { RunUUID: "1", NodeID: "node1", @@ -981,7 +981,7 @@ func TestTerminateRun(t *testing.T) { }, } - runDetail, err := runStore.GetRun("1") + runDetail, err := runStore.GetRun("1", false) assert.Nil(t, err) assert.Equal(t, expectedRun.ToV1(), runDetail.ToV1()) } @@ -1008,19 +1008,19 @@ func TestCreateMetric_Success(t *testing.T) { db, runStore := initializeRunStore() defer db.Close() - metric := &model.RunMetric{ + metric := &model.RunMetricV1{ RunUUID: "1", NodeID: "node1", Name: "acurracy", NumberValue: 0.77, Format: "PERCENTAGE", } - runStore.CreateMetric(metric) + runStore.CreateV1Metric(metric) - runDetail, err := runStore.GetRun("1") + runDetail, err := runStore.GetRun("1", false) assert.Nil(t, err, "Got error: %+v", err) sort.Sort(RunMetricSorter(runDetail.Metrics)) - assert.Equal(t, []*model.RunMetric{ + assert.Equal(t, []*model.RunMetricV1{ metric, { RunUUID: "1", @@ -1036,23 +1036,23 @@ func TestCreateMetric_DupReports_Fail(t *testing.T) { db, runStore := initializeRunStore() defer db.Close() - metric1 := &model.RunMetric{ + metric1 := &model.RunMetricV1{ RunUUID: "1", NodeID: "node1", Name: "acurracy", NumberValue: 0.77, Format: "PERCENTAGE", } - metric2 := &model.RunMetric{ + metric2 := &model.RunMetricV1{ RunUUID: "1", NodeID: "node1", Name: "acurracy", NumberValue: 0.88, Format: "PERCENTAGE", } - runStore.CreateMetric(metric1) + runStore.CreateV1Metric(metric1) - err := runStore.CreateMetric(metric2) + err := runStore.CreateV1Metric(metric2) _, ok := err.(*util.UserError) assert.True(t, ok) } @@ -1072,7 +1072,7 @@ func TestGetRun_InvalidMetricPayload_Ignore(t *testing.T) { }).ToSql() db.Exec(sql, args...) 
- run, err := runStore.GetRun("1") + run, err := runStore.GetRun("1", false) assert.Nil(t, err, "Got error: %+v", err) assert.Empty(t, run.Metrics) } @@ -1080,30 +1080,30 @@ func TestGetRun_InvalidMetricPayload_Ignore(t *testing.T) { func TestListRuns_WithMetrics(t *testing.T) { db, runStore := initializeRunStore() defer db.Close() - metric1 := &model.RunMetric{ + metric1 := &model.RunMetricV1{ RunUUID: "1", NodeID: "node1", Name: "acurracy", NumberValue: 0.77, Format: "PERCENTAGE", } - metric2 := &model.RunMetric{ + metric2 := &model.RunMetricV1{ RunUUID: "1", NodeID: "node2", Name: "logloss", NumberValue: -1.2, Format: "RAW", } - metric3 := &model.RunMetric{ + metric3 := &model.RunMetricV1{ RunUUID: "2", NodeID: "node2", Name: "logloss", NumberValue: -1.3, Format: "RAW", } - runStore.CreateMetric(metric1) - runStore.CreateMetric(metric2) - runStore.CreateMetric(metric3) + runStore.CreateV1Metric(metric1) + runStore.CreateV1Metric(metric2) + runStore.CreateV1Metric(metric3) expectedRuns := []*model.Run{ { @@ -1132,7 +1132,7 @@ func TestListRuns_WithMetrics(t *testing.T) { PipelineRoot: "gs://my-bucket/path/to/root/run1", }, }, - Metrics: []*model.RunMetric{ + Metrics: []*model.RunMetricV1{ { RunUUID: "1", NodeID: "node1", @@ -1171,7 +1171,7 @@ func TestListRuns_WithMetrics(t *testing.T) { PipelineRoot: "gs://my-bucket/path/to/root/run2", }, }, - Metrics: []*model.RunMetric{ + Metrics: []*model.RunMetricV1{ { RunUUID: "2", NodeID: "node2", @@ -1188,10 +1188,10 @@ func TestListRuns_WithMetrics(t *testing.T) { opts, err := list.NewOptions(&model.Run{}, 2, "id", nil) assert.Nil(t, err) - runs, total_size, _, err := runStore.ListRuns(&model.FilterContext{}, opts) + runs, totalSize, _, err := runStore.ListRuns(&model.FilterContext{}, opts, false) runs[0] = runs[0].ToV1() runs[1] = runs[1].ToV1() - assert.Equal(t, 3, total_size) + assert.Equal(t, 3, totalSize) assert.Nil(t, err) for _, run := range expectedRuns { sort.Sort(RunMetricSorter(run.Metrics)) @@ -1214,7 +1214,7 @@ func TestArchiveRun(t *testing.T) { // Archive run err = runStore.ArchiveRun("1") assert.Nil(t, err) - run, getRunErr := runStore.GetRun("1") + run, getRunErr := runStore.GetRun("1", false) assert.Nil(t, getRunErr) assert.Equal(t, run.StorageState, model.StorageStateArchived) @@ -1246,14 +1246,14 @@ func TestUnarchiveRun(t *testing.T) { // Archive run err = runStore.ArchiveRun("1") assert.Nil(t, err) - run, getRunErr := runStore.GetRun("1") + run, getRunErr := runStore.GetRun("1", false) assert.Nil(t, getRunErr) assert.Equal(t, run.StorageState, model.StorageStateArchived) // Unarchive it back err = runStore.UnarchiveRun("1") assert.Nil(t, err) - run, getRunErr = runStore.GetRun("1") + run, getRunErr = runStore.GetRun("1", false) assert.Nil(t, getRunErr) assert.Equal(t, run.StorageState, model.StorageStateAvailable) @@ -1280,7 +1280,7 @@ func TestArchiveRun_IncludedInRunList(t *testing.T) { // Archive run err := runStore.ArchiveRun("1") assert.Nil(t, err) - run, getRunErr := runStore.GetRun("1") + run, getRunErr := runStore.GetRun("1", false) assert.Nil(t, getRunErr) assert.Equal(t, run.StorageState, model.StorageStateArchived) @@ -1306,7 +1306,7 @@ func TestArchiveRun_IncludedInRunList(t *testing.T) { }, }, }, - Metrics: []*model.RunMetric{ + Metrics: []*model.RunMetricV1{ { RunUUID: "1", NodeID: "node1", @@ -1326,7 +1326,7 @@ func TestArchiveRun_IncludedInRunList(t *testing.T) { expectedRuns[0] = expectedRuns[0].ToV1() opts, err := list.NewOptions(&model.Run{}, 1, "", nil) runs, total_size, nextPageToken, err := 
runStore.ListRuns(
-		&model.FilterContext{ReferenceKey: &model.ReferenceKey{Type: model.ExperimentResourceType, ID: defaultFakeExpId}}, opts)
+		&model.FilterContext{ReferenceKey: &model.ReferenceKey{Type: model.ExperimentResourceType, ID: defaultFakeExpId}}, opts, false)
 	runs[0] = runs[0].ToV1()
 	assert.Nil(t, err)
 	assert.Equal(t, 2, total_size)
@@ -1346,7 +1346,7 @@ func TestDeleteRun(t *testing.T) {
 	// Delete run
 	err = runStore.DeleteRun("1")
 	assert.Nil(t, err)
-	_, err = runStore.GetRun("1")
+	_, err = runStore.GetRun("1", false)
 	assert.NotNil(t, err)
 	assert.Contains(t, err.Error(), "Run 1 not found")
@@ -1366,7 +1366,7 @@ func TestDeleteRun_InternalError(t *testing.T) {
 }
 
 func TestParseMetrics(t *testing.T) {
-	expectedModelRunMetrics := []*model.RunMetric{
+	expectedModelRunMetrics := []*model.RunMetricV1{
 		{
 			RunUUID: "run-1",
 			Name:    "metric-1",
diff --git a/backend/src/apiserver/storage/task_store.go b/backend/src/apiserver/storage/task_store.go
index fcd3570ec41..92e3cfd21d0 100644
--- a/backend/src/apiserver/storage/task_store.go
+++ b/backend/src/apiserver/storage/task_store.go
@@ -15,53 +15,65 @@ package storage
 
 import (
+	"crypto/sha256"
 	"database/sql"
+	"encoding/hex"
 	"encoding/json"
 	"fmt"
 
 	sq "github.com/Masterminds/squirrel"
 	"github.com/golang/glog"
+	apiv2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client"
 	"github.com/kubeflow/pipelines/backend/src/apiserver/list"
 	"github.com/kubeflow/pipelines/backend/src/apiserver/model"
 	"github.com/kubeflow/pipelines/backend/src/common/util"
+	"google.golang.org/protobuf/proto"
+	"google.golang.org/protobuf/types/known/structpb"
+	"google.golang.org/protobuf/types/known/timestamppb"
 )
 
-const table_name = "tasks"
+const tableName = "tasks"
 
 var taskColumns = []string{
 	"UUID",
 	"Namespace",
-	"PipelineName",
 	"RunUUID",
-	"PodName",
-	"MLMDExecutionID",
-	"CreatedTimestamp",
-	"StartedTimestamp",
-	"FinishedTimestamp",
+	"Pods",
+	"CreatedAtInSec",
+	"StartedInSec",
+	"FinishedInSec",
 	"Fingerprint",
 	"Name",
+	"DisplayName",
 	"ParentTaskUUID",
 	"State",
+	"StatusMetadata",
 	"StateHistory",
-	"MLMDInputs",
-	"MLMDOutputs",
-	"ChildrenPods",
+	"InputParameters",
+	"OutputParameters",
+	"Type",
+	"TypeAttrs",
+	"ScopePath",
 }
 
-var taskColumnsWithPayload = append(taskColumns, "Payload")
+// Ensure TaskStore implements TaskStoreInterface
+var _ TaskStoreInterface = (*TaskStore)(nil)
 
 type TaskStoreInterface interface {
-	// Create a task entry in the database.
+	// CreateTask creates a task entry in the database.
 	CreateTask(task *model.Task) (*model.Task, error)
 
-	// Fetches a task with a given id.
+	// GetTask fetches a task with a given id.
 	GetTask(id string) (*model.Task, error)
 
-	// Fetches tasks for given filtering and listing options.
+	// ListTasks fetches tasks for given filtering and listing options.
 	ListTasks(filterContext *model.FilterContext, opts *list.Options) ([]*model.Task, int, string, error)
 
-	// Creates new tasks or updates the existing ones.
-	CreateOrUpdateTasks(tasks []*model.Task) ([]*model.Task, error)
+	// UpdateTask updates an existing task entry in the database.
+	UpdateTask(new *model.Task) (*model.Task, error)
+
+	// GetChildTasks fetches all child tasks for a given task UUID.
+	GetChildTasks(taskID string) ([]*model.Task, error)
 }
 
 type TaskStore struct {
@@ -79,6 +91,269 @@ func NewTaskStore(db *DB, time util.TimeInterface, uuid util.UUIDGeneratorInterf
 	}
 }
 
+// scanTaskRow scans a single row into a model.Task. It expects the column order to match taskColumns.
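+// Both *sql.Row and *sql.Rows satisfy the anonymous interface{ Scan(dest ...any) error }
+// parameter, so the same scanner can back a single-row lookup and row iteration alike.
+// Illustrative use (mirroring scanRows below):
+//
+//	for rows.Next() {
+//		task, err := scanTaskRow(rows)
+//		if err != nil { /* handle */ }
+//		tasks = append(tasks, task)
+//	}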
+func scanTaskRow(rowscanner interface{ Scan(dest ...any) error }) (*model.Task, error) { + var uuid, namespace, runUUID, fingerprint string + var name, displayName, parentTaskID, pods, statusMetadata, stateHistory, inputParams, outputParams, typeAttrs, scopePath sql.NullString + var createdAtInSec, startedInSec, finishedInSec sql.NullInt64 + var taskState, taskType int32 + if err := rowscanner.Scan( + &uuid, + &namespace, + &runUUID, + &pods, + &createdAtInSec, + &startedInSec, + &finishedInSec, + &fingerprint, + &name, + &displayName, + &parentTaskID, + &taskState, + &statusMetadata, + &stateHistory, + &inputParams, + &outputParams, + &taskType, + &typeAttrs, + &scopePath, + ); err != nil { + return nil, err + } + var statusMetadataNew model.JSONData + if statusMetadata.Valid { + if err := json.Unmarshal([]byte(statusMetadata.String), &statusMetadataNew); err != nil { + return nil, err + } + } + var stateHistoryNew model.JSONSlice + if stateHistory.Valid { + if err := json.Unmarshal([]byte(stateHistory.String), &stateHistoryNew); err != nil { + return nil, err + } + } + var podsNew model.JSONSlice + if pods.Valid { + if err := json.Unmarshal([]byte(pods.String), &podsNew); err != nil { + return nil, err + } + } + var inputParameters model.JSONSlice + if inputParams.Valid { + if err := json.Unmarshal([]byte(inputParams.String), &inputParameters); err != nil { + return nil, err + } + } + var outputParameters model.JSONSlice + if outputParams.Valid { + if err := json.Unmarshal([]byte(outputParams.String), &outputParameters); err != nil { + return nil, err + } + } + var typeAttrsData model.JSONData + if typeAttrs.Valid { + if err := json.Unmarshal([]byte(typeAttrs.String), &typeAttrsData); err != nil { + return nil, err + } + } + var scopePathData model.JSONSlice + if scopePath.Valid { + if err := json.Unmarshal([]byte(scopePath.String), &scopePathData); err != nil { + return nil, err + } + } + var parentTaskIDNew *string + if parentTaskID.Valid { + parentTaskIDNew = &parentTaskID.String + } + return &model.Task{ + UUID: uuid, + Namespace: namespace, + RunUUID: runUUID, + Pods: podsNew, + CreatedAtInSec: createdAtInSec.Int64, + StartedInSec: startedInSec.Int64, + FinishedInSec: finishedInSec.Int64, + Fingerprint: fingerprint, + Name: name.String, + DisplayName: displayName.String, + ParentTaskUUID: parentTaskIDNew, + State: model.TaskStatus(taskState), + StatusMetadata: statusMetadataNew, + StateHistory: stateHistoryNew, + InputParameters: inputParameters, + OutputParameters: outputParameters, + Type: model.TaskType(taskType), + TypeAttrs: typeAttrsData, + ScopePath: scopePathData, + }, nil +} + +// hydrateArtifactsForTasks fills InputArtifactsHydrated and OutputArtifactsHydrated for provided tasks by +// querying artifact_tasks joined with artifacts. It uses TaskID IN (...) to limit scope. +func hydrateArtifactsForTasks(db *DB, tasks []*model.Task) error { + if len(tasks) == 0 { + return nil + } + // Build map and list of task IDs + taskByID := make(map[string]*model.Task, len(tasks)) + taskIDs := make([]string, 0, len(tasks)) + for _, t := range tasks { + if t == nil || t.UUID == "" { + continue + } + if _, ok := taskByID[t.UUID]; !ok { + taskByID[t.UUID] = t + taskIDs = append(taskIDs, t.UUID) + } + } + if len(taskIDs) == 0 { + return nil + } + + // Query artifact links for these tasks + sqlStr, args, err := sq. 
+ Select( + "artifact_tasks.TaskID", + "artifact_tasks.Type", + "artifact_tasks.Producer", + "artifact_tasks.ArtifactKey", + "artifacts.UUID", + "artifacts.Namespace", + "artifacts.Type", + "artifacts.URI", + "artifacts.Name", + "artifacts.CreatedAtInSec", + "artifacts.LastUpdateInSec", + "artifacts.Metadata", + "artifacts.NumberValue", + ). + From("artifact_tasks"). + Join("artifacts ON artifact_tasks.ArtifactID = artifacts.UUID"). + Where(sq.Eq{"artifact_tasks.TaskID": taskIDs}). + ToSql() + if err != nil { + return err + } + + rows, err := db.Query(sqlStr, args...) + if err != nil { + return err + } + defer rows.Close() + + for rows.Next() { + var taskID string + var linkType sql.NullInt32 + var producer sql.NullString + var key string + var artUUID, artNamespace, artName string + var artType sql.NullInt32 + var createdAt, updatedAt sql.NullInt64 + var metadata, artURI sql.NullString + var numberValue sql.NullFloat64 + + if err := rows.Scan(&taskID, &linkType, &producer, &key, + &artUUID, &artNamespace, &artType, &artURI, &artName, &createdAt, &updatedAt, &metadata, &numberValue); err != nil { + return err + } + + task := taskByID[taskID] + if task == nil { + continue + } + + var metaMap model.JSONData + if metadata.Valid { + if err := json.Unmarshal([]byte(metadata.String), &metaMap); err != nil { + return err + } + } + mArtifact := &model.Artifact{ + UUID: artUUID, + Namespace: artNamespace, + Type: model.ArtifactType(artType.Int32), + Name: artName, + CreatedAtInSec: createdAt.Int64, + LastUpdateInSec: updatedAt.Int64, + Metadata: metaMap, + } + if artURI.Valid { + mArtifact.URI = &artURI.String + } + if numberValue.Valid { + mArtifact.NumberValue = &numberValue.Float64 + } + + // Parse producer JSON to IOProducer + var producerProto *model.IOProducer + if producer.Valid && producer.String != "" { + var producerData model.JSONData + if err := json.Unmarshal([]byte(producer.String), &producerData); err == nil { + producerProto = &model.IOProducer{} + if taskName, ok := producerData["taskName"].(string); ok { + producerProto.TaskName = taskName + } + if iteration, ok := producerData["iteration"].(float64); ok { + iterInt := int64(iteration) + producerProto.Iteration = &iterInt + } + } + } + + h := model.TaskArtifactHydrated{ + Value: mArtifact, + Producer: producerProto, + Key: key, + Type: apiv2beta1.IOType(linkType.Int32), + } + + isOutput, err := iOTypeIsOutput(apiv2beta1.IOType(linkType.Int32)) + if err != nil { + return err + } + if isOutput { + task.OutputArtifactsHydrated = append(task.OutputArtifactsHydrated, h) + } else { + task.InputArtifactsHydrated = append(task.InputArtifactsHydrated, h) + } + } + return rows.Err() +} + +func iOTypeIsOutput(ioType apiv2beta1.IOType) (bool, error) { + switch ioType { + case apiv2beta1.IOType_OUTPUT, + apiv2beta1.IOType_ITERATOR_OUTPUT, + apiv2beta1.IOType_ONE_OF_OUTPUT, + apiv2beta1.IOType_TASK_FINAL_STATUS_OUTPUT: + return true, nil + case apiv2beta1.IOType_COMPONENT_INPUT, + apiv2beta1.IOType_COLLECTED_INPUTS, + apiv2beta1.IOType_TASK_OUTPUT_INPUT, + apiv2beta1.IOType_RUNTIME_VALUE_INPUT, + apiv2beta1.IOType_ITERATOR_INPUT, + apiv2beta1.IOType_ITERATOR_INPUT_RAW, + apiv2beta1.IOType_COMPONENT_DEFAULT_INPUT: + return false, nil + default: + return false, fmt.Errorf("unknown IOType %v", ioType) + } +} + +func (s *TaskStore) scanRows(rows *sql.Rows) ([]*model.Task, error) { + var tasks []*model.Task + for rows.Next() { + t, err := scanTaskRow(rows) + if err != nil { + fmt.Printf("scan error is %v", err) + return tasks, err + } + tasks = 
append(tasks, t) + } + return tasks, nil +} + func (s *TaskStore) CreateTask(task *model.Task) (*model.Task, error) { // Set up UUID for task. newTask := *task @@ -88,13 +363,31 @@ func (s *TaskStore) CreateTask(task *model.Task) (*model.Task, error) { } newTask.UUID = id.String() - if newTask.CreatedTimestamp == 0 { - if newTask.StartedTimestamp == 0 { + if newTask.CreatedAtInSec == 0 { + if newTask.StartedInSec == 0 { now := s.time.Now().Unix() - newTask.StartedTimestamp = now - newTask.CreatedTimestamp = now + newTask.StartedInSec = now + newTask.CreatedAtInSec = now } else { - newTask.CreatedTimestamp = newTask.StartedTimestamp + newTask.CreatedAtInSec = newTask.StartedInSec + } + } + + // Auto-populate state history if state is set (mirrors Run behavior) + // Only append if state_history is empty OR if last state differs from current state + if newTask.State != 0 { + if len(newTask.StateHistory) == 0 || getLastTaskState(newTask.StateHistory) != newTask.State { + taskStatus := &apiv2beta1.PipelineTaskDetail_TaskStatus{ + UpdateTime: ×tamppb.Timestamp{Seconds: s.time.Now().Unix()}, + State: apiv2beta1.PipelineTaskDetail_TaskState(newTask.State), + } + newEntry, err := model.ProtoSliceToJSONSlice([]*apiv2beta1.PipelineTaskDetail_TaskStatus{taskStatus}) + if err != nil { + return nil, util.NewInternalServerError(err, "Failed to create state history entry") + } + if len(newEntry) > 0 { + newTask.StateHistory = append(newTask.StateHistory, newEntry[0]) + } } } @@ -105,35 +398,62 @@ func (s *TaskStore) CreateTask(task *model.Task) (*model.Task, error) { return nil, util.NewInternalServerError(err, "Failed to marshal state history in a new run") } - childrenPodsString := "" - if children, err := json.Marshal(newTask.ChildrenPods); err == nil { - childrenPodsString = string(children) + podsString := "" + if podNames, err := json.Marshal(newTask.Pods); err == nil { + podsString = string(podNames) + } else { + return nil, util.NewInternalServerError(err, "Failed to marshal pod names in a new task") + } + + inputParamsString := "" + if inputParams, err := json.Marshal(newTask.InputParameters); err == nil { + inputParamsString = string(inputParams) } else { - return nil, util.NewInternalServerError(err, "Failed to marshal children pods in a new run") + return nil, util.NewInternalServerError(err, "Failed to marshal input parameters in a new task") + } + + outputParamsString := "" + if outputParams, err := json.Marshal(newTask.OutputParameters); err == nil { + outputParamsString = string(outputParams) + } else { + return nil, util.NewInternalServerError(err, "Failed to marshal output parameters in a new task") + } + + typeAttrsString := "" + if typeAttrs, err := json.Marshal(newTask.TypeAttrs); err == nil { + typeAttrsString = string(typeAttrs) + } else { + return nil, util.NewInternalServerError(err, "Failed to marshal type attributes in a new task") + } + + scopePathStr := "" + if b, err := json.Marshal(newTask.ScopePath); err == nil { + scopePathStr = string(b) + } else { + return nil, util.NewInternalServerError(err, "Failed to marshal scope path in a new task") } sql, args, err := sq. - Insert(table_name). + Insert(tableName). 
 		SetMap(
 			sq.Eq{
-				"UUID": newTask.UUID,
-				"Namespace": newTask.Namespace,
-				"PipelineName": newTask.PipelineName,
-				"RunUUID": newTask.RunID,
-				"PodName": newTask.PodName,
-				"MLMDExecutionID": newTask.MLMDExecutionID,
-				"CreatedTimestamp": newTask.CreatedTimestamp,
-				"StartedTimestamp": newTask.StartedTimestamp,
-				"FinishedTimestamp": newTask.FinishedTimestamp,
-				"Fingerprint": newTask.Fingerprint,
-				"Name": newTask.Name,
-				"ParentTaskUUID": newTask.ParentTaskId,
-				"State": newTask.State.ToString(),
-				"StateHistory": stateHistoryString,
-				"MLMDInputs": newTask.MLMDInputs,
-				"MLMDOutputs": newTask.MLMDOutputs,
-				"ChildrenPods": childrenPodsString,
-				"Payload": newTask.ToString(),
+				"UUID": newTask.UUID,
+				"Namespace": newTask.Namespace,
+				"RunUUID": newTask.RunUUID,
+				"Pods": podsString,
+				"CreatedAtInSec": newTask.CreatedAtInSec,
+				"StartedInSec": newTask.StartedInSec,
+				"FinishedInSec": newTask.FinishedInSec,
+				"Fingerprint": newTask.Fingerprint,
+				"Name": newTask.Name,
+				"ParentTaskUUID": newTask.ParentTaskUUID,
+				"ScopePath": scopePathStr,
+				"State": newTask.State,
+				"StateHistory": stateHistoryString,
+				"InputParameters": inputParamsString,
+				"OutputParameters": outputParamsString,
+				"Type": newTask.Type,
+				"TypeAttrs": typeAttrsString,
 			},
 		).
 		ToSql()
@@ -149,67 +469,7 @@ func (s *TaskStore) CreateTask(task *model.Task) (*model.Task, error) {
 	return &newTask, nil
 }
 
-func (s *TaskStore) scanRows(rows *sql.Rows) ([]*model.Task, error) {
-	var tasks []*model.Task
-	for rows.Next() {
-		var uuid, namespace, pipelineName, runUUID, podName, mlmdExecutionID, fingerprint string
-		var name, parentTaskId, state, stateHistory, inputs, outputs, children sql.NullString
-		var createdTimestamp, startedTimestamp, finishedTimestamp sql.NullInt64
-		err := rows.Scan(
-			&uuid,
-			&namespace,
-			&pipelineName,
-			&runUUID,
-			&podName,
-			&mlmdExecutionID,
-			&createdTimestamp,
-			&startedTimestamp,
-			&finishedTimestamp,
-			&fingerprint,
-			&name,
-			&parentTaskId,
-			&state,
-			&stateHistory,
-			&inputs,
-			&outputs,
-			&children,
-		)
-		if err != nil {
-			fmt.Printf("scan error is %v", err)
-			return tasks, err
-		}
-		var stateHistoryNew []*model.RuntimeStatus
-		if stateHistory.Valid {
-			json.Unmarshal([]byte(stateHistory.String), &stateHistoryNew)
-		}
-		var childrenPods []string
-		if children.Valid {
-			json.Unmarshal([]byte(children.String), &childrenPods)
-		}
-		task := &model.Task{
-			UUID: uuid,
-			Namespace: namespace,
-			PipelineName: pipelineName,
-			RunID: runUUID,
-			PodName: podName,
-			MLMDExecutionID: mlmdExecutionID,
-			CreatedTimestamp: createdTimestamp.Int64,
-			StartedTimestamp: startedTimestamp.Int64,
-			FinishedTimestamp: finishedTimestamp.Int64,
-			Fingerprint: fingerprint,
-			Name: name.String,
-			ParentTaskId: parentTaskId.String,
-			StateHistory: stateHistoryNew,
-			MLMDInputs: model.LargeText(inputs.String),
-			MLMDOutputs: model.LargeText(outputs.String),
-			ChildrenPods: childrenPods,
-		}
-		tasks = append(tasks, task)
-	}
-	return tasks, nil
-}
-
-// Runs two SQL queries in a transaction to return a list of matching experiments, as well as their
+// ListTasks runs two SQL queries in a transaction to return a list of matching tasks, as well as their
 // total_size. The total_size does not reflect the page size.
func (s *TaskStore) ListTasks(filterContext *model.FilterContext, opts *list.Options) ([]*model.Task, int, string, error) { errorF := func(err error) ([]*model.Task, int, string, error) { @@ -218,12 +478,19 @@ func (s *TaskStore) ListTasks(filterContext *model.FilterContext, opts *list.Opt // SQL for getting the filtered and paginated rows sqlBuilder := sq.Select(taskColumns...).From("tasks") - if filterContext.ReferenceKey != nil && filterContext.ReferenceKey.Type == model.PipelineResourceType { - sqlBuilder = sqlBuilder.Where(sq.Eq{"PipelineName": filterContext.ReferenceKey.ID}) - } if filterContext.ReferenceKey != nil && filterContext.ReferenceKey.Type == model.RunResourceType { sqlBuilder = sqlBuilder.Where(sq.Eq{"RunUUID": filterContext.ReferenceKey.ID}) } + if filterContext.ReferenceKey != nil && filterContext.Type == model.TaskResourceType { + sqlBuilder = sqlBuilder.Where(sq.Eq{"ParentTaskUUID": filterContext.ID}) + } + if filterContext.ReferenceKey != nil && filterContext.Type == model.NamespaceResourceType { + // Only add namespace filter if namespace is not empty + // Empty namespace in single-user mode means list all tasks + if filterContext.ID != "" { + sqlBuilder = sqlBuilder.Where(sq.Eq{"Namespace": filterContext.ID}) + } + } sqlBuilder = opts.AddFilterToSelect(sqlBuilder) rowsSql, rowsArgs, err := opts.AddPaginationToSelect(sqlBuilder).ToSql() @@ -234,12 +501,19 @@ func (s *TaskStore) ListTasks(filterContext *model.FilterContext, opts *list.Opt // SQL for getting total size. This matches the query to get all the rows above, in order // to do the same filter, but counts instead of scanning the rows. sqlBuilder = sq.Select("count(*)").From("tasks") - if filterContext.ReferenceKey != nil && filterContext.ReferenceKey.Type == model.PipelineResourceType { - sqlBuilder = sqlBuilder.Where(sq.Eq{"PipelineName": filterContext.ReferenceKey.ID}) - } if filterContext.ReferenceKey != nil && filterContext.ReferenceKey.Type == model.RunResourceType { sqlBuilder = sqlBuilder.Where(sq.Eq{"RunUUID": filterContext.ReferenceKey.ID}) } + if filterContext.ReferenceKey != nil && filterContext.Type == model.TaskResourceType { + sqlBuilder = sqlBuilder.Where(sq.Eq{"ParentTaskUUID": filterContext.ID}) + } + if filterContext.ReferenceKey != nil && filterContext.Type == model.NamespaceResourceType { + // Only add namespace filter if namespace is not empty + // Empty namespace in single-user mode means list all tasks + if filterContext.ID != "" { + sqlBuilder = sqlBuilder.Where(sq.Eq{"Namespace": filterContext.ID}) + } + } sizeSql, sizeArgs, err := opts.AddFilterToSelect(sqlBuilder).ToSql() if err != nil { return errorF(err) @@ -291,15 +565,22 @@ func (s *TaskStore) ListTasks(filterContext *model.FilterContext, opts *list.Opt } if len(exps) <= opts.PageSize { + if err := hydrateArtifactsForTasks(s.db, exps); err != nil { + return errorF(err) + } return exps, total_size, "", nil } npt, err := opts.NextPageToken(exps[opts.PageSize]) - return exps[:opts.PageSize], total_size, npt, err + page := exps[:opts.PageSize] + if err := hydrateArtifactsForTasks(s.db, page); err != nil { + return errorF(err) + } + return page, total_size, npt, err } func (s *TaskStore) GetTask(id string) (*model.Task, error) { - sql, args, err := sq. + toSQL, args, err := sq. Select(taskColumns...). From("tasks"). Where(sq.Eq{"tasks.uuid": id}). 
@@ -307,7 +588,7 @@ func (s *TaskStore) GetTask(id string) (*model.Task, error) {
 	if err != nil {
 		return nil, util.NewInternalServerError(err, "Failed to create query to get task: %v", err.Error())
 	}
-	r, err := s.db.Query(sql, args...)
+	r, err := s.db.Query(toSQL, args...)
 	if err != nil {
 		return nil, util.NewInternalServerError(err, "Failed to get task: %v", err.Error())
 	}
@@ -315,179 +596,401 @@
 	tasks, err := s.scanRows(r)
 	if err != nil || len(tasks) > 1 {
-		return nil, util.NewInternalServerError(err, "Failed to get pipeline: %v", err.Error())
+		return nil, util.NewInternalServerError(err, "Failed to get task: %v", err)
 	}
 	if len(tasks) == 0 {
 		return nil, util.NewResourceNotFoundError("task", fmt.Sprint(id))
 	}
+	// Hydrate artifacts for this task
+	if err := hydrateArtifactsForTasks(s.db, []*model.Task{tasks[0]}); err != nil {
+		return nil, util.NewInternalServerError(err, "Failed to hydrate task artifacts")
+	}
 	return tasks[0], nil
 }
 
-// Updates missing fields with existing data entries.
-func (s *TaskStore) patchWithExistingTasks(tasks []*model.Task) error {
-	var podNames []string
-	for _, task := range tasks {
-		podNames = append(podNames, task.PodName)
-	}
-	sql, args, err := sq.
+// getTaskForUpdate retrieves a task with a row-level lock (SELECT ... FOR UPDATE).
+// This must be called within a transaction.
+// The lock ensures that no other transaction can modify this row until the current transaction completes.
+// For MySQL/PostgreSQL, this adds FOR UPDATE. For SQLite (tests), it's a no-op since SQLite doesn't support row locks.
+func (s *TaskStore) getTaskForUpdate(tx *sql.Tx, id string) (*model.Task, error) {
+	// Build SELECT query
+	sqlStr, args, err := sq.
 		Select(taskColumns...).
 		From("tasks").
-		Where(sq.Eq{"PodName": podNames}).
+		Where(sq.Eq{"tasks.uuid": id}).
+		Limit(1).
 		ToSql()
 	if err != nil {
-		return util.NewInternalServerError(err, "Failed to create query to check existing tasks")
+		return nil, util.NewInternalServerError(err, "Failed to create query to get task for update: %v", err.Error())
 	}
-	r, err := s.db.Query(sql, args...)
+
+	// Add FOR UPDATE clause using the dialect (MySQL adds it, SQLite doesn't)
+	sqlStr = s.db.SelectForUpdate(sqlStr)
+
+	// Execute query within the transaction
+	row := tx.QueryRow(sqlStr, args...)
+
+	task, err := scanTaskRow(row)
 	if err != nil {
-		return util.NewInternalServerError(err, "Failed to check existing tasks")
+		if err == sql.ErrNoRows {
+			return nil, util.NewResourceNotFoundError("task", fmt.Sprint(id))
+		}
+		return nil, util.NewInternalServerError(err, "Failed to get task for update: %v", err)
 	}
-	defer r.Close()
-	existingTasks, err := s.scanRows(r)
+
+	return task, nil
+}
+
+// UpdateTask updates an existing task in the tasks table and returns the updated task.
+// Uses row-level locking to prevent race conditions when multiple concurrent updates
+// try to modify the same task (e.g., loop iterations propagating parameters to the parent task).
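+//
+// Roughly, the sequence implemented below is (sketch of these steps, not
+// additional behavior):
+//
+//	tx, _ := s.db.Begin()
+//	locked, _ := s.getTaskForUpdate(tx, new.UUID) // SELECT ... FOR UPDATE
+//	merged, _ := mergeParameters(locked.InputParameters, new.InputParameters)
+//	tx.Exec(updateSQL, args...)                   // write only provided fields
+//	tx.Commit()                                   // releases the row lock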
+func (s *TaskStore) UpdateTask(new *model.Task) (*model.Task, error) {
+	if new == nil {
+		return nil, util.NewInvalidInputError("Failed to update task: task cannot be nil")
+	}
+	if new.UUID == "" {
+		return nil, util.NewInvalidInputError("Failed to update task: task ID cannot be empty")
+	}
+
+	// Start a transaction to ensure atomic read-merge-write with row locking
+	tx, err := s.db.Begin()
 	if err != nil {
-		return util.NewInternalServerError(err, "Failed to parse existing tasks")
+		return nil, util.NewInternalServerError(err, "Failed to start transaction for task update")
+	}
+	defer tx.Rollback() // Will be a no-op if Commit() succeeds
+
+	// Get the current task state with a row-level lock (SELECT ... FOR UPDATE).
+	// This prevents other concurrent updates from reading the same old state.
+	lockedOld, err := s.getTaskForUpdate(tx, new.UUID)
+	if err != nil {
+		return nil, err
+	}
+
+	// lockedOld is used as the merge base below: merging against the freshly
+	// locked row ensures the update always sees the most recent state.
+
+	// Build SET map dynamically so we only update provided fields.
+	setMap := sq.Eq{}
+
+	// Simple scalar/string fields: update only when the caller provides a value.
+	// Strings are written only when non-empty, so existing values are not erased unintentionally.
+	if new.Namespace != "" {
+		setMap["Namespace"] = new.Namespace
+	}
+	if new.RunUUID != "" {
+		setMap["RunUUID"] = new.RunUUID
 	}
-	mapTasks := make(map[string]*model.Task, 0)
-	for _, task := range existingTasks {
-		mapTasks[task.PodName] = task
+	if new.Fingerprint != "" {
+		setMap["Fingerprint"] = new.Fingerprint
 	}
-	for _, task := range tasks {
-		if existingTask, ok := mapTasks[task.PodName]; ok {
-			patchTask(task, existingTask)
+	if new.Name != "" {
+		setMap["Name"] = new.Name
+	}
+	if new.DisplayName != "" {
+		setMap["DisplayName"] = new.DisplayName
+	}
+	if new.ParentTaskUUID != nil {
+		if *new.ParentTaskUUID == "" {
+			setMap["ParentTaskUUID"] = nil
+		} else {
+			setMap["ParentTaskUUID"] = *new.ParentTaskUUID
 		}
 	}
-	return nil
-}
-
-// Creates new entries or updates existing ones.
-func (s *TaskStore) CreateOrUpdateTasks(tasks []*model.Task) ([]*model.Task, error) {
-	buildQuery := func(ts []*model.Task) (string, []interface{}, error) {
-		sqlInsert := sq.Insert("tasks").Columns(taskColumnsWithPayload...)
-		for _, t := range ts {
-			childrenPodsString := ""
-			if len(t.ChildrenPods) > 0 {
-				children, err := json.Marshal(t.ChildrenPods)
-				if err != nil {
-					return "", nil, util.NewInternalServerError(err, "Failed to marshal child task ids in a task")
-				}
-				childrenPodsString = string(children)
-			}
-			stateHistoryString := ""
-			if len(t.StateHistory) > 0 {
-				history, err := json.Marshal(t.StateHistory)
-				if err != nil {
-					return "", nil, util.NewInternalServerError(err, "Failed to marshal state history in a task")
+	// State and Type default to 0, which is a valid enum value; update only when non-zero to avoid accidental resets.
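+	// For example, a caller that only wants to advance a task's state can send
+	// a sparse update; every column not set below keeps its stored value
+	// (sketch, with `id` standing in for an existing task UUID):
+	//
+	//	_, err := taskStore.UpdateTask(&model.Task{UUID: id, State: 2})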
+ if new.State != 0 { + setMap["State"] = new.State + + // Auto-populate state history when state changes (mirrors Run behavior) + // Use lockedOld.StateHistory as the base to prevent race conditions + mergedHistory := lockedOld.StateHistory + + // Check if we need to append new state to history + if len(mergedHistory) == 0 || getLastTaskState(mergedHistory) != new.State { + taskStatus := &apiv2beta1.PipelineTaskDetail_TaskStatus{ + UpdateTime: ×tamppb.Timestamp{Seconds: s.time.Now().Unix()}, + State: apiv2beta1.PipelineTaskDetail_TaskState(new.State), } - sqlInsert = sqlInsert.Values( - t.UUID, - t.Namespace, - t.PipelineName, - t.RunID, - t.PodName, - t.MLMDExecutionID, - t.CreatedTimestamp, - t.StartedTimestamp, - t.FinishedTimestamp, - t.Fingerprint, - t.Name, - t.ParentTaskId, - t.State.ToString(), - stateHistoryString, - t.MLMDInputs, - t.MLMDOutputs, - childrenPodsString, - t.ToString(), - ) - } - return sqlInsert.ToSql() - } - - // Check for existing tasks and fill empty field with existing data. - // Assumes that PodName column is a unique key. - if err := s.patchWithExistingTasks(tasks); err != nil { - return nil, util.NewInternalServerError(err, "Failed to check for existing tasks") - } - for _, task := range tasks { - task.State = task.State.ToV2() - if task.UUID == "" { - id, err := s.uuid.NewRandom() + newEntry, err := model.ProtoSliceToJSONSlice([]*apiv2beta1.PipelineTaskDetail_TaskStatus{taskStatus}) if err != nil { - return nil, util.NewInternalServerError(err, "Failed to create an task id") + return nil, util.NewInternalServerError(err, "Failed to create state history entry") } - task.UUID = id.String() + if len(newEntry) > 0 { + mergedHistory = append(mergedHistory, newEntry[0]) + } + } + + // Marshal merged history + if b, err := json.Marshal(mergedHistory); err == nil { + setMap["StateHistory"] = string(b) + } else { + return nil, util.NewInternalServerError(err, "Failed to marshal state history in an updated task") + } + } + + if new.Type != 0 { + setMap["Type"] = new.Type + } + // Timestamps: allow update when non-zero. + if new.StartedInSec != 0 { + setMap["StartedInSec"] = new.StartedInSec + } + if new.FinishedInSec != 0 { + setMap["FinishedInSec"] = new.FinishedInSec + } + + // JSON/slice/map fields: update only if not nil (presence indicates intent). 
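+	// For example, Pods == nil leaves the stored Pods column untouched, while
+	// an empty, non-nil model.JSONSlice{} deliberately overwrites it with "[]".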
+	// Note: StateHistory is auto-populated above when State changes
+	if new.StatusMetadata != nil {
+		if b, err := json.Marshal(new.StatusMetadata); err == nil {
+			setMap["StatusMetadata"] = string(b)
+		} else {
+			return nil, util.NewInternalServerError(err, "Failed to marshal status metadata in an updated task")
+		}
+	}
+	if new.Pods != nil {
+		if b, err := json.Marshal(new.Pods); err == nil {
+			setMap["Pods"] = string(b)
+		} else {
+			return nil, util.NewInternalServerError(err, "Failed to marshal pod names in an updated task")
+		}
+	}
+
+	// Merge input parameters using the locked old state.
+	// This prevents race conditions where concurrent updates might overwrite each other's parameters.
+	if new.InputParameters != nil {
+		// Use lockedOld (from SELECT FOR UPDATE) as the merge base
+		oldInputParams := lockedOld.InputParameters
+		merged, err := mergeParameters(oldInputParams, new.InputParameters)
+		if err != nil {
+			return nil, util.NewInternalServerError(err, "Failed to merge input parameters in an updated task")
+		}
+		if b, err := json.Marshal(merged); err == nil {
+			setMap["InputParameters"] = string(b)
+		} else {
+			return nil, util.NewInternalServerError(err, "Failed to marshal input parameters in an updated task")
 		}
-		if task.CreatedTimestamp == 0 {
-			task.CreatedTimestamp = s.time.Now().Unix()
+	}
+
+	// Merge output parameters using the locked old state.
+	// This prevents race conditions where concurrent updates might overwrite each other's parameters.
+	if new.OutputParameters != nil {
+		// Use lockedOld (from SELECT FOR UPDATE) as the merge base
+		oldOutputParams := lockedOld.OutputParameters
+
+		merged, err := mergeParameters(oldOutputParams, new.OutputParameters)
+		if err != nil {
+			return nil, util.NewInternalServerError(err, "Failed to merge output parameters in an updated task")
 		}
-		if len(task.StateHistory) == 0 || task.StateHistory[len(task.StateHistory)-1].State != task.State {
-			task.StateHistory = append(task.StateHistory, &model.RuntimeStatus{
-				UpdateTimeInSec: s.time.Now().Unix(),
-				State: task.State,
-			})
+		if b, err := json.Marshal(merged); err == nil {
+			setMap["OutputParameters"] = string(b)
+		} else {
+			return nil, util.NewInternalServerError(err, "Failed to marshal output parameters in an updated task")
 		}
 	}
-	// Execute the query
-	sql, arg, err := buildQuery(tasks)
+
+	if new.TypeAttrs != nil {
+		if b, err := json.Marshal(new.TypeAttrs); err == nil {
+			setMap["TypeAttrs"] = string(b)
+		} else {
+			return nil, util.NewInternalServerError(err, "Failed to marshal type attributes in an updated task")
+		}
+	}
+
+	if len(setMap) == 0 {
+		// Nothing to update; commit transaction and return current record
+		if err := tx.Commit(); err != nil {
+			return nil, util.NewInternalServerError(err, "Failed to commit transaction (no changes)")
+		}
+		return s.GetTask(new.UUID)
+	}
+
+	// Build UPDATE query
+	sqlStr, args, err := sq.
+		Update(tableName).
+		SetMap(setMap).
+		Where(sq.Eq{"UUID": new.UUID}).
+		ToSql()
 	if err != nil {
-		return nil, util.NewInternalServerError(err, "Failed to build query to update or insert tasks")
+		return nil, util.NewInternalServerError(err, "Failed to create query to update task: %v", err.Error())
 	}
-	sql = s.db.Upsert(sql, "UUID", true, taskColumnsWithPayload...)
-	_, err = s.db.Exec(sql, arg...)
+
+	// Execute UPDATE within the transaction.
+	// The row is already locked by our SELECT FOR UPDATE, so this is safe.
+	res, err := tx.Exec(sqlStr, args...)
 	if err != nil {
-		return nil, util.NewInternalServerError(err, "Failed to update or insert tasks. Query: %v. Args: %v", sql, arg)
+		return nil, util.NewInternalServerError(err, "Failed to update task: %v", err.Error())
 	}
-	return tasks, nil
+	if rows, _ := res.RowsAffected(); rows == 0 {
+		return nil, util.NewResourceNotFoundError("task", new.UUID)
+	}
+
+	// Commit the transaction to release the row lock and make changes visible
+	if err := tx.Commit(); err != nil {
+		return nil, util.NewInternalServerError(err, "Failed to commit transaction for task update")
+	}
+
+	glog.Infof("Successfully updated task %s with row-level locking", new.UUID)
+	return s.GetTask(new.UUID)
 }
 
-// Fills empty fields in a new task with the data from an existing task.
-func patchTask(original *model.Task, patch *model.Task) {
-	if original.UUID == "" {
-		original.UUID = patch.UUID
+// mergeParameters merges the new parameters into the old parameters, deduplicating by key.
+func mergeParameters(old, new model.JSONSlice) (model.JSONSlice, error) {
+	typeFunc := func() *apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter {
+		return &apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{}
 	}
-	if original.Namespace == "" {
-		original.Namespace = patch.Namespace
+	oldParams, err := model.JSONSliceToProtoSlice(old, typeFunc)
+	if err != nil {
+		return nil, err
 	}
-	if original.RunID == "" {
-		original.RunID = patch.RunID
+	newParams, err := model.JSONSliceToProtoSlice(new, typeFunc)
+	if err != nil {
+		return nil, err
+	}
+	makeKey := func(p *apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter) string {
+		key := fmt.Sprintf("%v-%s", p.Type, p.ParameterKey)
+		if p.Producer != nil {
+			key = fmt.Sprintf("%s-%s", key, p.Producer.TaskName)
+			if p.Producer.Iteration != nil {
+				key = fmt.Sprintf("%s-%d", key, *p.Producer.Iteration)
+			}
+		}
+		// Include a hash of the value. In the iterator case, iterations
+		// propagate values to upstream tasks without the iteration index
+		// (as in a for-loop task), so the value hash is needed to avoid
+		// key collisions.
+		valueHash, err := hashProtoValue(p.GetValue())
+		if err != nil {
+			glog.Errorf("Failed to hash parameter value: %v", err)
+		}
+		key = fmt.Sprintf("%s-%s", key, valueHash)
+		return key
 	}
-	if original.PodName == "" {
-		original.PodName = patch.PodName
+	mergedParams := map[string]*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{}
+	for _, p := range oldParams {
+		key := makeKey(p)
+		mergedParams[key] = p
 	}
-	if original.MLMDExecutionID == "" {
-		original.MLMDExecutionID = patch.MLMDExecutionID
+	for _, p := range newParams {
+		key := makeKey(p)
+		mergedParams[key] = p
 	}
-	if original.CreatedTimestamp == 0 {
-		original.CreatedTimestamp = patch.CreatedTimestamp
+	paramsSlice := make([]*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter, 0, len(mergedParams))
+	for _, p := range mergedParams {
+		paramsSlice = append(paramsSlice, p)
 	}
-	if original.StartedTimestamp == 0 {
-		original.StartedTimestamp = patch.StartedTimestamp
+	parameters, err := model.ProtoSliceToJSONSlice(paramsSlice)
+	if err != nil {
+		return nil, err
 	}
-	if original.FinishedTimestamp == 0 {
-		original.FinishedTimestamp = patch.FinishedTimestamp
+	return parameters, nil
+}
+
+func (s *TaskStore) GetChildTasks(taskID string) ([]*model.Task, error) {
+	toSQL, args, err := sq.
+		Select(taskColumns...).
+		From("tasks").
+		Where(sq.Eq{"ParentTaskUUID": taskID}).
+ ToSql() + + if err != nil { + return nil, util.NewInternalServerError(err, "Failed to create query to get child tasks: %v", err.Error()) } - if original.Fingerprint == "" { - original.Fingerprint = patch.Fingerprint + + rows, err := s.db.Query(toSQL, args...) + if err != nil { + return nil, util.NewInternalServerError(err, "Failed to get child tasks: %v", err.Error()) } - if original.Name == "" { - original.Name = patch.Name + defer rows.Close() + + return s.scanRows(rows) +} + +// GetTaskCountForRun returns the total count of tasks for a given run ID. +// This is a lightweight operation that doesn't perform task hydration. +func (s *TaskStore) GetTaskCountForRun(runID string) (int, error) { + sizeSQL, sizeArgs, err := sq. + Select("count(*)"). + From("tasks"). + Where(sq.Eq{"RunUUID": runID}). + ToSql() + + if err != nil { + return 0, util.NewInternalServerError(err, "Failed to create task count query: %v", err.Error()) } - if original.ParentTaskId == "" { - original.ParentTaskId = patch.ParentTaskId + + sizeRow, err := s.db.Query(sizeSQL, sizeArgs...) + if err != nil { + return 0, util.NewInternalServerError(err, "Failed to get task count: %v", err.Error()) } - if original.State.ToV2() == model.RuntimeStateUnspecified { - original.State = patch.State.ToV2() + defer sizeRow.Close() + + var total int + sizeRow.Next() + if err := sizeRow.Scan(&total); err != nil { + return 0, util.NewInternalServerError(err, "Failed to scan task count: %v", err.Error()) } - if original.MLMDInputs == "" { - original.MLMDInputs = patch.MLMDInputs + + return total, nil +} + +func hashProtoValue(v *structpb.Value) (string, error) { + // Deterministic binary marshal + b, err := proto.MarshalOptions{Deterministic: true}.Marshal(v) + if err != nil { + return "", err } - if original.MLMDOutputs == "" { - original.MLMDOutputs = patch.MLMDOutputs + sum := sha256.Sum256(b) + return hex.EncodeToString(sum[:]), nil +} + +// getLastTaskState retrieves the state from the last entry in task state history. +// Returns 0 (unspecified) if history is empty or cannot be parsed. +func getLastTaskState(history model.JSONSlice) model.TaskStatus { + if len(history) == 0 { + return 0 } - if original.StateHistory == nil { - original.StateHistory = patch.StateHistory + + // Convert JSONSlice to TaskStatus protobuf slice + typeFunc := func() *apiv2beta1.PipelineTaskDetail_TaskStatus { + return &apiv2beta1.PipelineTaskDetail_TaskStatus{} } - if len(original.ChildrenPods) == 0 { - original.ChildrenPods = patch.ChildrenPods + + histProtos, err := model.JSONSliceToProtoSlice(history, typeFunc) + if err != nil || len(histProtos) == 0 { + glog.Warningf("Failed to parse state history: %v", err) + return 0 } + + lastEntry := histProtos[len(histProtos)-1] + return model.TaskStatus(lastEntry.GetState()) +} + +// DeleteTasksForRun deletes all tasks associated with a specific run. +// This should be called before deleting a run to avoid foreign key constraint violations. +func (s *TaskStore) DeleteTasksForRun(tx *sql.Tx, runUUID string) error { + deleteSQL, deleteArgs, err := sq.Delete(tableName).Where(sq.Eq{"RunUUID": runUUID}).ToSql() + if err != nil { + return util.NewInternalServerError(err, "Failed to create query to delete tasks for run: %s", runUUID) + } + + var result sql.Result + if tx != nil { + result, err = tx.Exec(deleteSQL, deleteArgs...) + } else { + result, err = s.db.Exec(deleteSQL, deleteArgs...) 
+ } + + if err != nil { + return util.NewInternalServerError(err, "Failed to delete tasks for run %s from table", runUUID) + } + + rowsAffected, err := result.RowsAffected() + if err != nil { + glog.Warningf("Failed to get rows affected when deleting tasks for run %s: %v", runUUID, err) + } else { + glog.V(4).Infof("Deleted %d tasks for run %s", rowsAffected, runUUID) + } + + return nil } diff --git a/backend/src/apiserver/storage/task_store_test.go b/backend/src/apiserver/storage/task_store_test.go index 3e5103ce6ff..9d744f51374 100644 --- a/backend/src/apiserver/storage/task_store_test.go +++ b/backend/src/apiserver/storage/task_store_test.go @@ -15,611 +15,1161 @@ package storage import ( - "reflect" + "fmt" "testing" + apiv2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" + "github.com/kubeflow/pipelines/backend/src/apiserver/filter" "github.com/kubeflow/pipelines/backend/src/apiserver/list" "github.com/kubeflow/pipelines/backend/src/apiserver/model" "github.com/kubeflow/pipelines/backend/src/common/util" "github.com/stretchr/testify/assert" + "google.golang.org/grpc/codes" + "google.golang.org/protobuf/types/known/structpb" + "google.golang.org/protobuf/types/known/timestamppb" ) const ( - defaultFakeTaskId = "123e4567-e89b-12d3-a456-426655440010" - defaultFakeTaskIdTwo = "123e4567-e89b-12d3-a456-426655440011" - defaultFakeTaskIdThree = "123e4567-e89b-12d3-a456-426655440012" - defaultFakeTaskIdFour = "123e4567-e89b-12d3-a456-426655440013" - defaultFakeTaskIdFive = "123e4567-e89b-12d3-a456-426655440014" - defaultFakeTaskIdSix = "123e4567-e89b-12d3-a456-426655440016" + testUUID1 = "123e4567-e89b-12d3-a456-426655441011" + testUUID2 = "123e4567-e89b-12d3-a456-426655441012" + testUUID3 = "123e4567-e89b-12d3-a456-426655441013" + testUUID4 = "123e4567-e89b-12d3-a456-426655441014" + testUUID5 = "123e4567-e89b-12d3-a456-426655441015" ) -func initializeTaskStore() (*DB, *TaskStore) { +// initializeTaskStore sets up a fake DB with a couple of runs and returns a TaskStore ready for testing. +func initializeTaskStore() (*DB, *TaskStore, *RunStore) { db := NewFakeDBOrFatal() - expStore := NewExperimentStore(db, util.NewFakeTimeForEpoch(), util.NewFakeUUIDGeneratorOrFatal(defaultFakeExpId, nil)) - expStore.CreateExperiment(&model.Experiment{Name: "e1", Namespace: "ns1"}) - expStore.uuid = util.NewFakeUUIDGeneratorOrFatal(defaultFakeExpIdTwo, nil) - expStore.CreateExperiment(&model.Experiment{Name: "e2", Namespace: "ns2"}) - runStore := NewRunStore(db, util.NewFakeTimeForEpoch()) - + fakeTime := util.NewFakeTimeForEpoch() + // Seed a couple of runs to satisfy Task foreign key constraint. 
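+	// (Tasks reference runs through their RunUUID column, so the runs must exist first.)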
+ runStore := NewRunStore(db, fakeTime) run1 := &model.Run{ - UUID: defaultFakeRunId, - ExperimentId: defaultFakeExpId, - DisplayName: "run1", - K8SName: "workflow-name", - Namespace: "ns1", - ServiceAccount: "pipeline-runner", - StorageState: model.StorageStateAvailable, + UUID: "run-1", + ExperimentId: "exp-1", + K8SName: "run1", + DisplayName: "run1", + StorageState: model.StorageStateAvailable, + Namespace: "ns1", RunDetails: model.RunDetails{ - CreatedAtInSec: 4, - Conditions: "done", - State: model.RuntimeStateSucceeded, - WorkflowRuntimeManifest: "workflow1", + CreatedAtInSec: 1, + ScheduledAtInSec: 1, + Conditions: "Running", + State: model.RuntimeStateRunning, }, } - runStore.CreateRun(run1) - run2 := &model.Run{ - UUID: defaultFakeRunIdTwo, - ExperimentId: defaultFakeExpIdTwo, - DisplayName: "run2", - K8SName: "workflow-name", - Namespace: "ns2", - ServiceAccount: "pipeline-runner", - StorageState: model.StorageStateAvailable, + UUID: "run-2", + ExperimentId: "exp-2", + K8SName: "run2", + DisplayName: "run2", + StorageState: model.StorageStateAvailable, + Namespace: "ns2", RunDetails: model.RunDetails{ - CreatedAtInSec: 4, - Conditions: "done", - State: model.RuntimeStateSucceeded, - WorkflowRuntimeManifest: "workflow2", + CreatedAtInSec: 2, + ScheduledAtInSec: 2, + Conditions: "Succeeded", + State: model.RuntimeStateSucceeded, }, } + _, _ = runStore.CreateRun(run1) + _, _ = runStore.CreateRun(run2) - run3 := &model.Run{ - UUID: defaultFakeRunIdThree, - ExperimentId: defaultFakeExpId, - DisplayName: "run3", - K8SName: "workflow-name", - Namespace: "ns1", - ServiceAccount: "pipeline-runner", - StorageState: model.StorageStateAvailable, - RunDetails: model.RunDetails{ - CreatedAtInSec: 5, - Conditions: "Running", - State: model.RuntimeStateRunning, - WorkflowRuntimeManifest: "workflow1", - }, + // Create task store with controllable UUID generator + taskStore := NewTaskStore(db, fakeTime, util.NewFakeUUIDGeneratorOrFatal(testUUID1, nil)) + return db, taskStore, runStore +} + +func createTaskPod(name, uid string, typ apiv2beta1.PipelineTaskDetail_TaskPodType) *apiv2beta1.PipelineTaskDetail_TaskPod { + return &apiv2beta1.PipelineTaskDetail_TaskPod{ + Name: name, + Uid: uid, + Type: typ, + } +} + +func createTaskPodsAsJSONSlice(pods ...*apiv2beta1.PipelineTaskDetail_TaskPod) model.JSONSlice { + podsAsSlice, err := model.ProtoSliceToJSONSlice(pods) + if err != nil { + panic(err) + } + return podsAsSlice +} + +// Minimal test to ensure model<->DB mapping remains valid. 
+func TestTaskAPIFieldMap(t *testing.T) { + for _, modelField := range (&model.Task{}).APIToModelFieldMap() { + assert.Contains(t, taskColumns, modelField) + } +} + +func TestCreateTask_Success(t *testing.T) { + db, taskStore, _ := initializeTaskStore() + defer db.Close() + pods := createTaskPodsAsJSONSlice(createTaskPod("p1", "uid1", apiv2beta1.PipelineTaskDetail_EXECUTOR)) + task := &model.Task{ + Namespace: "ns1", + RunUUID: "run-1", + Pods: pods, + Fingerprint: "fp-1", + Name: "taskA", + ParentTaskUUID: strPTR(""), + State: 1, + StateHistory: model.JSONSlice{}, + InputParameters: model.JSONSlice{}, + OutputParameters: model.JSONSlice{}, + Type: 0, + TypeAttrs: model.JSONData(map[string]interface{}{"k": "v"}), } - // runStore.CreateRun(run1) - runStore.CreateRun(run2) - runStore.CreateRun(run3) - - taskStore := NewTaskStore(db, util.NewFakeTimeForEpoch(), util.NewFakeUUIDGeneratorOrFatal(defaultFakeTaskId, nil)) - task1 := &model.Task{ - Namespace: "ns1", - PodName: "pod1", - PipelineName: "namespace/ns1/pipeline/pipeline1", - RunID: run1.UUID, - MLMDExecutionID: "1", - StartedTimestamp: 1, - FinishedTimestamp: 2, - Fingerprint: "1", - } - taskStore.CreateTask(task1) - - taskStore.uuid = util.NewFakeUUIDGeneratorOrFatal(defaultFakeTaskIdTwo, nil) - task2 := &model.Task{ - Namespace: "ns1", - PodName: "pod2", - PipelineName: "namespace/ns1/pipeline/pipeline1", - RunID: run1.UUID, - MLMDExecutionID: "2", - StartedTimestamp: 3, - FinishedTimestamp: 4, - Fingerprint: "2", - } - taskStore.CreateTask(task2) - - taskStore.uuid = util.NewFakeUUIDGeneratorOrFatal(defaultFakeTaskIdThree, nil) - task3 := &model.Task{ - Namespace: "ns1", - PodName: "pod3", - PipelineName: "namespace/ns1/pipeline/pipeline1", - RunID: run3.UUID, - MLMDExecutionID: "3", - StartedTimestamp: 5, - FinishedTimestamp: 6, - Fingerprint: "1", - } - taskStore.CreateTask(task3) - - taskStore.uuid = util.NewFakeUUIDGeneratorOrFatal(defaultFakeTaskIdFour, nil) - task4 := &model.Task{ - Namespace: "ns2", - PodName: "pod4", - PipelineName: "namespace/ns2/pipeline/pipeline2", - RunID: run2.UUID, - MLMDExecutionID: "4", - StartedTimestamp: 5, - FinishedTimestamp: 6, - Fingerprint: "1", - } - taskStore.CreateTask(task4) - - taskStore.uuid = util.NewFakeUUIDGeneratorOrFatal(defaultFakeTaskIdFive, nil) - task5 := &model.Task{ - Namespace: "ns2", - PodName: "pod5", - PipelineName: "namespace/ns2/pipeline/pipeline2", - RunID: run2.UUID, - MLMDExecutionID: "5", - StartedTimestamp: 7, - FinishedTimestamp: 8, - Fingerprint: "10", - } - taskStore.CreateTask(task5) - taskStore.uuid = util.NewFakeUUIDGeneratorOrFatal(defaultFakeTaskIdSix, nil) - return db, taskStore + created, err := taskStore.CreateTask(task) + assert.NoError(t, err) + assert.Equal(t, testUUID1, created.UUID) + // CreatedAt and StartedInSec should be auto-populated to the same timestamp (fake time starts from 0 -> 1) + assert.Equal(t, created.CreatedAtInSec, created.StartedInSec) + assert.Greater(t, created.CreatedAtInSec, int64(0)) + + // Verify it can be fetched back + fetched, err := taskStore.GetTask(created.UUID) + assert.NoError(t, err) + assert.Equal(t, created.UUID, fetched.UUID) + assert.Equal(t, "ns1", fetched.Namespace) + assert.Equal(t, "run-1", fetched.RunUUID) + assert.Equal(t, pods, fetched.Pods) + assert.Equal(t, "fp-1", fetched.Fingerprint) + assert.Equal(t, "taskA", fetched.Name) + assert.Equal(t, model.TaskStatus(1), fetched.State) + assert.Equal(t, model.TaskType(0), fetched.Type) +} + +func TestGetTask_NotFound(t *testing.T) { + db, taskStore, _ := 
initializeTaskStore() + defer db.Close() + _, err := taskStore.GetTask(testUUID1) + assert.Equal(t, codes.NotFound, err.(*util.UserError).ExternalStatusCode()) } -func TestListTasks(t *testing.T) { - db, taskStore := initializeTaskStore() +func TestListTasks_BasicAndFilters(t *testing.T) { + db, taskStore, _ := initializeTaskStore() defer db.Close() - expectedFirstPageTasks := []*model.Task{ - { - UUID: defaultFakeTaskIdFour, - Namespace: "ns2", - PipelineName: "namespace/ns2/pipeline/pipeline2", - RunID: defaultFakeRunIdTwo, - PodName: "pod4", - MLMDExecutionID: "4", - StartedTimestamp: 5, - CreatedTimestamp: 5, - FinishedTimestamp: 6, - Fingerprint: "1", + // Create a parent task and two child tasks under different runs/namespaces + taskStore.uuid = util.NewFakeUUIDGeneratorOrFatal(testUUID1, nil) + parent, err := taskStore.CreateTask(&model.Task{ + Namespace: "ns1", + RunUUID: "run-1", + Pods: createTaskPodsAsJSONSlice(createTaskPod("p1", "uid1", apiv2beta1.PipelineTaskDetail_EXECUTOR)), + Fingerprint: "fp-parent", + State: 1, + StateHistory: model.JSONSlice{}, + InputParameters: model.JSONSlice{}, + OutputParameters: model.JSONSlice{}, + Type: 0, + TypeAttrs: model.JSONData{}, + }) + assert.NoError(t, err) + + taskStore.uuid = util.NewFakeUUIDGeneratorOrFatal(testUUID2, nil) + _, err = taskStore.CreateTask(&model.Task{ + Namespace: "ns1", + RunUUID: "run-1", + ParentTaskUUID: strPTR(parent.UUID), + Pods: createTaskPodsAsJSONSlice(createTaskPod("p2", "uid2", apiv2beta1.PipelineTaskDetail_EXECUTOR)), + Fingerprint: "fp-c1", + State: 1, + StateHistory: model.JSONSlice{}, + InputParameters: model.JSONSlice{}, + OutputParameters: model.JSONSlice{}, + Type: 0, + TypeAttrs: model.JSONData{}, + }) + assert.NoError(t, err) + + taskStore.uuid = util.NewFakeUUIDGeneratorOrFatal(testUUID3, nil) + _, err = taskStore.CreateTask(&model.Task{ + Namespace: "ns2", + RunUUID: "run-2", + ParentTaskUUID: strPTR(parent.UUID), + Pods: createTaskPodsAsJSONSlice(createTaskPod("p3", "uid3", apiv2beta1.PipelineTaskDetail_EXECUTOR)), + Fingerprint: "fp-c2", + State: 1, + StateHistory: model.JSONSlice{}, + InputParameters: model.JSONSlice{}, + OutputParameters: model.JSONSlice{}, + Type: 0, + TypeAttrs: model.JSONData{}, + }) + assert.NoError(t, err) + + // List all tasks + opts, _ := list.NewOptions(&model.Task{}, 10, "", nil) + all, total, npt, err := taskStore.ListTasks(&model.FilterContext{}, opts) + assert.NoError(t, err) + assert.Equal(t, 3, len(all)) + assert.Equal(t, 3, total) + assert.Equal(t, "", npt) + + // Filter by RunUUID + opts2, _ := list.NewOptions(&model.Task{}, 10, "", nil) + runFiltered, total2, _, err := taskStore.ListTasks(&model.FilterContext{ReferenceKey: &model.ReferenceKey{Type: model.RunResourceType, ID: "run-1"}}, opts2) + assert.NoError(t, err) + assert.Equal(t, 2, len(runFiltered)) + assert.Equal(t, 2, total2) + + // Filter by ParentTaskUUID (child tasks) + opts3, _ := list.NewOptions(&model.Task{}, 10, "", nil) + children, total3, _, err := taskStore.ListTasks(&model.FilterContext{ReferenceKey: &model.ReferenceKey{Type: model.TaskResourceType, ID: parent.UUID}}, opts3) + assert.NoError(t, err) + assert.Equal(t, 2, len(children)) + assert.Equal(t, 2, total3) +} + +func TestUpdateTask_Success(t *testing.T) { + db, taskStore, _ := initializeTaskStore() + defer db.Close() + + pod1 := createTaskPod("p1", "uid1", apiv2beta1.PipelineTaskDetail_EXECUTOR) + pod2 := createTaskPod("p2", "uid2", apiv2beta1.PipelineTaskDetail_EXECUTOR) + // Create a task + taskStore.uuid = 
util.NewFakeUUIDGeneratorOrFatal(testUUID1, nil) + created, err := taskStore.CreateTask(&model.Task{ + Namespace: "ns1", + RunUUID: "run-1", + Pods: createTaskPodsAsJSONSlice(pod1), + Fingerprint: "fp-0", + State: 1, + StateHistory: model.JSONSlice{}, + InputParameters: model.JSONSlice{}, + OutputParameters: model.JSONSlice{}, + Type: 0, + TypeAttrs: map[string]interface{}{}, + }) + assert.NoError(t, err) + + // Deep copy the task for updates + updatedTask := &model.Task{ + UUID: created.UUID, + Namespace: created.Namespace, + RunUUID: created.RunUUID, + Pods: created.Pods, + Fingerprint: created.Fingerprint, + Name: created.Name, + DisplayName: created.DisplayName, + ParentTaskUUID: created.ParentTaskUUID, + State: created.State, + StateHistory: created.StateHistory, + InputParameters: created.InputParameters, + OutputParameters: created.OutputParameters, + Type: created.Type, + TypeAttrs: created.TypeAttrs, + CreatedAtInSec: created.CreatedAtInSec, + StartedInSec: created.StartedInSec, + } + + // Update some fields + updatedTask.Name = "updatedName" + updatedTask.Fingerprint = "fp-1" + updatedTask.Pods = createTaskPodsAsJSONSlice(pod1, pod2) + updatedTask.State = 2 + updated, err := taskStore.UpdateTask(updatedTask) + assert.NoError(t, err) + assert.Equal(t, created.UUID, updated.UUID) + assert.Equal(t, "updatedName", updated.Name) + assert.Equal(t, "fp-1", updated.Fingerprint) + assert.Equal(t, createTaskPodsAsJSONSlice(pod1, pod2), updated.Pods) + assert.Equal(t, model.TaskStatus(2), updated.State) +} + +func TestUpdateTask_MergesParameters(t *testing.T) { + db, taskStore, _ := initializeTaskStore() + defer db.Close() + + // Create a task with initial input parameters + val1, _ := structpb.NewValue("initial-input") + initialParam := &apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{ + Value: val1, + ParameterKey: "common-param", + Type: apiv2beta1.IOType_COMPONENT_INPUT, + } + initialParams, err := model.ProtoSliceToJSONSlice([]*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{initialParam}) + assert.NoError(t, err) + + taskStore.uuid = util.NewFakeUUIDGeneratorOrFatal(testUUID1, nil) + created, err := taskStore.CreateTask(&model.Task{ + Namespace: "ns1", + RunUUID: "run-1", + Pods: createTaskPodsAsJSONSlice(createTaskPod("p1", "uid1", apiv2beta1.PipelineTaskDetail_EXECUTOR)), + Fingerprint: "fp-0", + State: 1, + StateHistory: model.JSONSlice{}, + InputParameters: initialParams, + OutputParameters: model.JSONSlice{}, + Type: 0, + TypeAttrs: map[string]interface{}{}, + }) + assert.NoError(t, err) + + // Simulate first update from iteration 0 + valIter0, _ := structpb.NewValue("output-from-iter-0") + iter0Param := &apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{ + Value: valIter0, + ParameterKey: "loop-output", + Type: apiv2beta1.IOType_ITERATOR_OUTPUT, + Producer: &apiv2beta1.IOProducer{ + TaskName: "loop-task", + Iteration: int64PTR(0), }, } - expectedSecondPageTasks := []*model.Task{ - { - UUID: defaultFakeTaskIdFive, - Namespace: "ns2", - PipelineName: "namespace/ns2/pipeline/pipeline2", - RunID: defaultFakeRunIdTwo, - MLMDExecutionID: "5", - PodName: "pod5", - CreatedTimestamp: 7, - StartedTimestamp: 7, - FinishedTimestamp: 8, - Fingerprint: "10", + iter0Params, err := model.ProtoSliceToJSONSlice([]*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{iter0Param}) + assert.NoError(t, err) + + update1 := &model.Task{ + UUID: created.UUID, + OutputParameters: iter0Params, + } + updated1, err := taskStore.UpdateTask(update1) + assert.NoError(t, err) + + // Verify 
first update has both initial input param and iter0 output param + assert.Equal(t, 1, len(updated1.InputParameters)) + assert.Equal(t, 1, len(updated1.OutputParameters)) + + // Simulate second update from iteration 1 + valIter1, _ := structpb.NewValue("output-from-iter-1") + iter1Param := &apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{ + Value: valIter1, + ParameterKey: "loop-output", + Type: apiv2beta1.IOType_ITERATOR_OUTPUT, + Producer: &apiv2beta1.IOProducer{ + TaskName: "loop-task", + Iteration: int64PTR(1), }, } + iter1Params, err := model.ProtoSliceToJSONSlice([]*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{iter1Param}) + assert.NoError(t, err) - opts, err := list.NewOptions(&model.Task{}, 1, "", nil) - assert.Nil(t, err) - - tasks, total_size, nextPageToken, err := taskStore.ListTasks( - &model.FilterContext{ReferenceKey: &model.ReferenceKey{Type: model.RunResourceType, ID: defaultFakeRunIdTwo}}, opts) - assert.Nil(t, err) - assert.Equal(t, 2, total_size) - assert.Equal(t, expectedFirstPageTasks, tasks, "Unexpected Tasks listed") - assert.NotEmpty(t, nextPageToken) - - opts, err = list.NewOptionsFromToken(nextPageToken, 1) - assert.Nil(t, err) - tasks, total_size, nextPageToken, err = taskStore.ListTasks( - &model.FilterContext{ReferenceKey: &model.ReferenceKey{Type: model.RunResourceType, ID: defaultFakeRunIdTwo}}, opts) - assert.Nil(t, err) - assert.Equal(t, 2, total_size) - assert.Equal(t, expectedSecondPageTasks, tasks, "Unexpected Tasks listed") - assert.Empty(t, nextPageToken) + update2 := &model.Task{ + UUID: created.UUID, + OutputParameters: iter1Params, + } + updated2, err := taskStore.UpdateTask(update2) + assert.NoError(t, err) + + // Verify second update preserves both iter0 and iter1 output params + assert.Equal(t, 1, len(updated2.InputParameters)) + assert.Equal(t, 2, len(updated2.OutputParameters), "Should have both iteration 0 and 1 output parameters") + + // Verify both iterations are present + typeFunc := func() *apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter { + return &apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{} + } + outputProtos, err := model.JSONSliceToProtoSlice(updated2.OutputParameters, typeFunc) + assert.NoError(t, err) + + iterations := make(map[int64]bool) + for _, p := range outputProtos { + if p.Producer != nil && p.Producer.Iteration != nil { + iterations[*p.Producer.Iteration] = true + } + } + assert.True(t, iterations[0], "Should have iteration 0 parameter") + assert.True(t, iterations[1], "Should have iteration 1 parameter") } -func TestTaskStore_GetTask(t *testing.T) { - db, taskStore := initializeTaskStore() +func TestGetChildTasks_ReturnsChildren(t *testing.T) { + db, taskStore, _ := initializeTaskStore() defer db.Close() - task1 := &model.Task{ - UUID: defaultFakeTaskIdFour, - Namespace: "ns2", - PipelineName: "namespace/ns2/pipeline/pipeline2", - RunID: defaultFakeRunIdTwo, - PodName: "pod4", - MLMDExecutionID: "4", - CreatedTimestamp: 5, - StartedTimestamp: 5, - FinishedTimestamp: 6, - Fingerprint: "1", - } - task2 := &model.Task{ - UUID: defaultFakeTaskIdFive, - Namespace: "ns2", - PipelineName: "namespace/ns2/pipeline/pipeline2", - PodName: "pod5", - RunID: defaultFakeRunIdTwo, - MLMDExecutionID: "5", - CreatedTimestamp: 7, - StartedTimestamp: 7, - FinishedTimestamp: 8, - Fingerprint: "10", - } - - tests := []struct { - name string - id string - want *model.Task - wantErr bool - errMsg string - }{ - { - "valid -task 1", - defaultFakeTaskIdFour, - task1, - false, - "", - }, - { - "valid -task 2", - 
defaultFakeTaskIdFive, - task2, - false, - "", - }, - { - "not found", - "This does not exist", - nil, - true, - "not found", + taskStore.uuid = util.NewFakeUUIDGeneratorOrFatal(testUUID1, nil) + parent, err := taskStore.CreateTask(&model.Task{ + Namespace: "ns1", + RunUUID: "run-1", + Name: "parent", + DisplayName: "Parent Task", + Pods: createTaskPodsAsJSONSlice(createTaskPod("p1", "uid1", apiv2beta1.PipelineTaskDetail_EXECUTOR)), + Fingerprint: "fp-p", + State: 1, + StateHistory: model.JSONSlice{}, + InputParameters: model.JSONSlice{}, + OutputParameters: model.JSONSlice{}, + Type: 0, + TypeAttrs: map[string]interface{}{}, + }) + assert.NoError(t, err) + + taskStore.uuid = util.NewFakeUUIDGeneratorOrFatal(testUUID2, nil) + _, err = taskStore.CreateTask(&model.Task{ + Namespace: "ns1", + RunUUID: "run-1", + ParentTaskUUID: strPTR(parent.UUID), + Name: "child-a", + DisplayName: "First Child", + Pods: createTaskPodsAsJSONSlice(createTaskPod("p1", "uid1", apiv2beta1.PipelineTaskDetail_EXECUTOR)), + Fingerprint: "fp-a", + State: 1, + StateHistory: model.JSONSlice{}, + InputParameters: model.JSONSlice{}, + OutputParameters: model.JSONSlice{}, + Type: 0, + TypeAttrs: map[string]interface{}{}, + }) + assert.NoError(t, err) + + taskStore.uuid = util.NewFakeUUIDGeneratorOrFatal(testUUID3, nil) + _, err = taskStore.CreateTask(&model.Task{ + Namespace: "ns1", + RunUUID: "run-1", + ParentTaskUUID: strPTR(parent.UUID), + Name: "child-b", + DisplayName: "Second Child", + Pods: createTaskPodsAsJSONSlice(createTaskPod("p1", "uid1", apiv2beta1.PipelineTaskDetail_EXECUTOR)), + Fingerprint: "fp-b", + State: 2, + StateHistory: model.JSONSlice{}, + InputParameters: model.JSONSlice{}, + OutputParameters: model.JSONSlice{}, + Type: 0, + TypeAttrs: map[string]interface{}{}, + }) + assert.NoError(t, err) + + children, err := taskStore.GetChildTasks(parent.UUID) + assert.NoError(t, err) + assert.Equal(t, 2, len(children)) +} + +func TestListTasks_FilterPredicates_EqualsOnColumns(t *testing.T) { + db, taskStore, _ := initializeTaskStore() + defer db.Close() + + // Seed 3 tasks across 2 runs with differing names, statuses and fingerprints. 
+ taskStore.uuid = util.NewFakeUUIDGeneratorOrFatal(testUUID1, nil) + _, err := taskStore.CreateTask(&model.Task{ + Namespace: "ns1", + RunUUID: "run-1", + Name: "alpha", + DisplayName: "Alpha Task", + Pods: createTaskPodsAsJSONSlice(createTaskPod("p1", "uid1", apiv2beta1.PipelineTaskDetail_EXECUTOR)), + Fingerprint: "fp-alpha", + State: 1, + StateHistory: model.JSONSlice{}, + InputParameters: model.JSONSlice{}, + OutputParameters: model.JSONSlice{}, + Type: 0, + TypeAttrs: map[string]interface{}{}, + }) + assert.NoError(t, err) + + taskStore.uuid = util.NewFakeUUIDGeneratorOrFatal(testUUID2, nil) + _, err = taskStore.CreateTask(&model.Task{ + Namespace: "ns1", + RunUUID: "run-1", + Name: "beta", + DisplayName: "Beta Task", + Pods: createTaskPodsAsJSONSlice(createTaskPod("p1", "uid1", apiv2beta1.PipelineTaskDetail_EXECUTOR)), + Fingerprint: "fp-beta", + State: 1, + StateHistory: model.JSONSlice{}, + InputParameters: model.JSONSlice{}, + OutputParameters: model.JSONSlice{}, + Type: 0, + TypeAttrs: map[string]interface{}{}, + }) + assert.NoError(t, err) + + taskStore.uuid = util.NewFakeUUIDGeneratorOrFatal(testUUID3, nil) + _, err = taskStore.CreateTask(&model.Task{ + Namespace: "ns2", + RunUUID: "run-2", + Name: "gamma", + DisplayName: "Gamma Task", + Pods: createTaskPodsAsJSONSlice(createTaskPod("p1", "uid1", apiv2beta1.PipelineTaskDetail_EXECUTOR)), + Fingerprint: "fp-gamma", + State: 2, + StateHistory: model.JSONSlice{}, + InputParameters: model.JSONSlice{}, + OutputParameters: model.JSONSlice{}, + Type: 0, + TypeAttrs: map[string]interface{}{}, + }) + assert.NoError(t, err) + + // name == "beta" + f1Proto := &apiv2beta1.Filter{ + Predicates: []*apiv2beta1.Predicate{ + {Key: "name", Operation: apiv2beta1.Predicate_EQUALS, Value: &apiv2beta1.Predicate_StringValue{StringValue: "beta"}}, + }} + f1, err := filter.New(f1Proto) + assert.NoError(t, err) + opts1, err := list.NewOptions(&model.Task{}, 20, "", f1) + assert.NoError(t, err) + res1, total1, _, err := taskStore.ListTasks(&model.FilterContext{}, opts1) + assert.NoError(t, err) + assert.Equal(t, 1, len(res1)) + assert.Equal(t, 1, total1) + assert.Equal(t, "beta", res1[0].Name) + + // status == 2 + f2Proto := &apiv2beta1.Filter{ + Predicates: []*apiv2beta1.Predicate{ + {Key: "status", Operation: apiv2beta1.Predicate_EQUALS, Value: &apiv2beta1.Predicate_IntValue{IntValue: 2}}, }, } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - got, err := taskStore.GetTask(tt.id) - if tt.wantErr { - assert.NotNil(t, err) - assert.Contains(t, err.Error(), tt.errMsg) - } else { - assert.Nil(t, err) - } - if !reflect.DeepEqual(got, tt.want) { - t.Errorf("TaskStore.GetTask() = %v, want %v", got, tt.want) - } + f2, err := filter.New(f2Proto) + assert.NoError(t, err) + opts2, err := list.NewOptions(&model.Task{}, 20, "", f2) + assert.NoError(t, err) + res2, total2, _, err := taskStore.ListTasks(&model.FilterContext{}, opts2) + assert.NoError(t, err) + assert.Equal(t, 1, len(res2)) + assert.Equal(t, 1, total2) + assert.Equal(t, model.TaskStatus(2), res2[0].State) + + // cache_fingerprint == "fp-alpha" + f3Proto := &apiv2beta1.Filter{ + Predicates: []*apiv2beta1.Predicate{ + {Key: "cache_fingerprint", Operation: apiv2beta1.Predicate_EQUALS, Value: &apiv2beta1.Predicate_StringValue{StringValue: "fp-alpha"}}, + }} + f3, err := filter.New(f3Proto) + assert.NoError(t, err) + opts3, err := list.NewOptions(&model.Task{}, 20, "", f3) + assert.NoError(t, err) + res3, total3, _, err := taskStore.ListTasks(&model.FilterContext{}, opts3) + 
assert.NoError(t, err) + assert.Equal(t, 1, len(res3)) + assert.Equal(t, 1, total3) + assert.Equal(t, "fp-alpha", res3[0].Fingerprint) + + // Combined: run_id == "run-1" AND status == 1 + f4Proto := &apiv2beta1.Filter{ + Predicates: []*apiv2beta1.Predicate{ + {Key: "run_id", Operation: apiv2beta1.Predicate_EQUALS, Value: &apiv2beta1.Predicate_StringValue{StringValue: "run-1"}}, + {Key: "status", Operation: apiv2beta1.Predicate_EQUALS, Value: &apiv2beta1.Predicate_IntValue{IntValue: 1}}, + }} + f4, err := filter.New(f4Proto) + assert.NoError(t, err) + opts4, err := list.NewOptions(&model.Task{}, 20, "", f4) + assert.NoError(t, err) + res4, total4, _, err := taskStore.ListTasks(&model.FilterContext{}, opts4) + assert.NoError(t, err) + assert.Equal(t, 2, len(res4)) + assert.Equal(t, 2, total4) +} + +func TestListTasks_PaginationWithToken(t *testing.T) { + // Setup + db, taskStore, _ := initializeTaskStore() + defer db.Close() + + // Seed 5 tasks. FakeTime.Now() increments per call, so CreatedAtInSec/StartedInSec are strictly increasing + uuids := []string{testUUID1, testUUID2, testUUID3, testUUID4, testUUID5} + for i, id := range uuids { + // Control UUID so key tie-breaker is predictable if same timestamp ever occurred + taskStore.uuid = util.NewFakeUUIDGeneratorOrFatal(id, nil) + _, err := taskStore.CreateTask(&model.Task{ + Namespace: "ns1", + RunUUID: "run-1", + Name: fmt.Sprintf("task-%d", i+1), + Pods: createTaskPodsAsJSONSlice(createTaskPod("p1", "uid1", apiv2beta1.PipelineTaskDetail_EXECUTOR)), + Fingerprint: fmt.Sprintf("fp-%d", i+1), + State: 1, + StateHistory: model.JSONSlice{}, + InputParameters: model.JSONSlice{}, + OutputParameters: model.JSONSlice{}, + Type: 0, + TypeAttrs: map[string]interface{}{}, }) + assert.NoError(t, err) } + + // Page 1 + opts1, _ := list.NewOptions(&model.Task{}, 2, "", nil) + page1, total1, token1, err := taskStore.ListTasks(&model.FilterContext{}, opts1) + assert.NoError(t, err) + assert.Equal(t, 2, len(page1)) + assert.Equal(t, 5, total1) + assert.NotEmpty(t, token1) + + // Page 2 using token + opts2, err := list.NewOptionsFromToken(token1, 2) + assert.NoError(t, err) + page2, total2, token2, err := taskStore.ListTasks(&model.FilterContext{}, opts2) + assert.NoError(t, err) + assert.Equal(t, 2, len(page2)) + assert.Equal(t, 5, total2) + assert.NotEmpty(t, token2) + + // Page 3 using token (should be the last 1 item, then empty token) + opts3, err := list.NewOptionsFromToken(token2, 2) + assert.NoError(t, err) + page3, total3, token3, err := taskStore.ListTasks(&model.FilterContext{}, opts3) + assert.NoError(t, err) + assert.Equal(t, 1, len(page3)) + assert.Equal(t, 5, total3) + assert.Empty(t, token3) } -func TestTaskStore_patchWithExistingTasks(t *testing.T) { - db, taskStore := initializeTaskStore() +func TestTaskParameters_PersistAndFetch(t *testing.T) { + db, taskStore, _ := initializeTaskStore() defer db.Close() - tests := []struct { - name string - tasks []*model.Task - want []*model.Task - wantErr bool - errMsg string - }{ - { - "valid -task 1", - []*model.Task{ - { - PodName: "pod4", - Namespace: "ns2", - PipelineName: "namespace/ns2/pipeline/pipeline2", - RunID: defaultFakeRunIdTwo, - MLMDExecutionID: "4", - StartedTimestamp: 5, - FinishedTimestamp: 6, - Fingerprint: "1", - }, - }, - []*model.Task{ - { - UUID: defaultFakeTaskIdFour, - CreatedTimestamp: 5, - PodName: "pod4", - Namespace: "ns2", - PipelineName: "namespace/ns2/pipeline/pipeline2", - RunID: defaultFakeRunIdTwo, - MLMDExecutionID: "4", - StartedTimestamp: 5, - FinishedTimestamp: 
6, - Fingerprint: "1", - State: model.RuntimeStateUnspecified, - }, - }, - false, - "", + // Build two simple IOParameter protos for inputs and outputs + inVal, _ := structpb.NewValue("in-val") + outVal, _ := structpb.NewValue("out-val") + inParam := &apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{ + Value: inVal, + ParameterKey: "in-name", + } + outParam := &apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{ + Value: outVal, + ParameterKey: "param-y", + Producer: &apiv2beta1.IOProducer{ + TaskName: "task-x", + }, + } + inParams, err := model.ProtoSliceToJSONSlice([]*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{inParam}) + assert.NoError(t, err) + outParams, err := model.ProtoSliceToJSONSlice([]*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{outParam}) + assert.NoError(t, err) + + taskStore.uuid = util.NewFakeUUIDGeneratorOrFatal(testUUID1, nil) + created, err := taskStore.CreateTask(&model.Task{ + Namespace: "ns1", + RunUUID: "run-1", + Pods: createTaskPodsAsJSONSlice(createTaskPod("p1", "uid1", apiv2beta1.PipelineTaskDetail_EXECUTOR)), + Fingerprint: "fp-param", + State: 1, + StateHistory: model.JSONSlice{}, + InputParameters: inParams, + OutputParameters: outParams, + Type: 0, + TypeAttrs: model.JSONData{}, + }) + assert.NoError(t, err) + + fetched, err := taskStore.GetTask(created.UUID) + assert.NoError(t, err) + assert.Equal(t, 1, len(fetched.InputParameters)) + assert.Equal(t, 1, len(fetched.OutputParameters)) +} + +func TestHydrateArtifactsForTask_GetAndList(t *testing.T) { + db, taskStore, _ := initializeTaskStore() + defer db.Close() + + // Create a task under run-1 + taskStore.uuid = util.NewFakeUUIDGeneratorOrFatal(testUUID1, nil) + task, err := taskStore.CreateTask(&model.Task{ + Namespace: "ns1", + RunUUID: "run-1", + Pods: createTaskPodsAsJSONSlice(createTaskPod("p1", "uid1", apiv2beta1.PipelineTaskDetail_EXECUTOR)), + Fingerprint: "fp-art", + State: 1, + StateHistory: model.JSONSlice{}, + InputParameters: model.JSONSlice{}, + OutputParameters: model.JSONSlice{}, + Type: 0, + TypeAttrs: model.JSONData{}, + }) + assert.NoError(t, err) + + // Create two artifacts via the ArtifactStore + artifactStore := NewArtifactStore(db, util.NewFakeTimeForEpoch(), util.NewFakeUUIDGeneratorOrFatal(testUUID2, nil)) + artIn, err := artifactStore.CreateArtifact(&model.Artifact{ + Namespace: "ns1", + Type: 0, + URI: strPTR("s3://bucket/in"), + Name: "in-art", + }) + assert.NoError(t, err) + artifactStore.uuid = util.NewFakeUUIDGeneratorOrFatal(testUUID3, nil) + artOut, err := artifactStore.CreateArtifact(&model.Artifact{ + Namespace: "ns1", + Type: 0, + URI: strPTR("s3://bucket/out"), + Name: "out-art", + }) + assert.NoError(t, err) + + // Link artifacts to task via artifact_tasks + ats1 := NewArtifactTaskStore(db, util.NewFakeUUIDGeneratorOrFatal("aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaa1", nil)) + // Input link with no producer fields -> ResolvedValue + _, err = ats1.CreateArtifactTask(&model.ArtifactTask{ + ArtifactID: artIn.UUID, + TaskID: task.UUID, + Type: model.IOType(apiv2beta1.IOType_COMPONENT_INPUT), + RunUUID: task.RunUUID, + ArtifactKey: "input-key", + }) + assert.NoError(t, err) + // Output link with producer fields -> PipelineChannel + ats2 := NewArtifactTaskStore(db, util.NewFakeUUIDGeneratorOrFatal("aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaa2", nil)) + _, err = ats2.CreateArtifactTask(&model.ArtifactTask{ + ArtifactID: artOut.UUID, + TaskID: task.UUID, + Type: model.IOType(apiv2beta1.IOType_OUTPUT), + RunUUID: task.RunUUID, + Producer: model.JSONData{ + 
"taskName": "producer-task", + }, + ArtifactKey: "output-key", + }) + assert.NoError(t, err) + + // Verify GetTask hydrates artifacts + fetched, err := taskStore.GetTask(task.UUID) + assert.NoError(t, err) + if assert.Equal(t, 1, len(fetched.InputArtifactsHydrated)) { + ia := fetched.InputArtifactsHydrated[0] + assert.Equal(t, "input-key", ia.Key) + if assert.NotNil(t, ia.Value) { + assert.Equal(t, "in-art", ia.Value.Name) + } + } + if assert.Equal(t, 1, len(fetched.OutputArtifactsHydrated)) { + oa := fetched.OutputArtifactsHydrated[0] + assert.NotNil(t, oa.Producer) + assert.Equal(t, "producer-task", oa.Producer.TaskName) + assert.Equal(t, "output-key", oa.Key) + if assert.NotNil(t, oa.Value) { + assert.Equal(t, "out-art", oa.Value.Name) + } + } + + // Verify ListTasks hydrates artifacts as well + opts, _ := list.NewOptions(&model.Task{}, 10, "", nil) + tasks, _, _, err := taskStore.ListTasks(&model.FilterContext{ReferenceKey: &model.ReferenceKey{Type: model.RunResourceType, ID: task.RunUUID}}, opts) + assert.NoError(t, err) + var found *model.Task + for _, tsk := range tasks { + if tsk.UUID == task.UUID { + found = tsk + break + } + } + if assert.NotNil(t, found) { + assert.Equal(t, 1, len(found.InputArtifactsHydrated)) + assert.Equal(t, 1, len(found.OutputArtifactsHydrated)) + } +} + +func int64PTR(i int64) *int64 { return &i } + +func TestMergeParameters_EmptySlices(t *testing.T) { + // Test merging nil slices - returns empty slice (semantically equivalent to nil) + result, err := mergeParameters(nil, nil) + assert.NoError(t, err) + assert.Equal(t, 0, len(result)) + + // Test merging empty slices + result, err = mergeParameters(model.JSONSlice{}, model.JSONSlice{}) + assert.NoError(t, err) + assert.Equal(t, 0, len(result)) + + // Test merging nil with non-empty + val1, _ := structpb.NewValue("value1") + param1 := &apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{ + Value: val1, + ParameterKey: "param1", + Type: apiv2beta1.IOType_COMPONENT_INPUT, + } + params1, err := model.ProtoSliceToJSONSlice([]*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{param1}) + assert.NoError(t, err) + + result, err = mergeParameters(nil, params1) + assert.NoError(t, err) + assert.Equal(t, 1, len(result)) + + result, err = mergeParameters(params1, nil) + assert.NoError(t, err) + assert.Equal(t, 1, len(result)) +} + +func TestMergeParameters_NoOverlap(t *testing.T) { + // Create two parameters with different keys + val1, _ := structpb.NewValue("value1") + param1 := &apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{ + Value: val1, + ParameterKey: "param1", + Type: apiv2beta1.IOType_COMPONENT_INPUT, + } + + val2, _ := structpb.NewValue("value2") + param2 := &apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{ + Value: val2, + ParameterKey: "param2", + Type: apiv2beta1.IOType_OUTPUT, + } + + oldParams, err := model.ProtoSliceToJSONSlice([]*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{param1}) + assert.NoError(t, err) + newParams, err := model.ProtoSliceToJSONSlice([]*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{param2}) + assert.NoError(t, err) + + result, err := mergeParameters(oldParams, newParams) + assert.NoError(t, err) + assert.Equal(t, 2, len(result)) + + // Convert back to verify both parameters are present + typeFunc := func() *apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter { + return &apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{} + } + resultProtos, err := model.JSONSliceToProtoSlice(result, typeFunc) + assert.NoError(t, err) + + keys := 
make(map[string]bool) + for _, p := range resultProtos { + keys[p.ParameterKey] = true + } + assert.True(t, keys["param1"]) + assert.True(t, keys["param2"]) +} + +func TestMergeParameters_WithProducer_NoIteration(t *testing.T) { + // Create parameters with producer but no iteration + val1, _ := structpb.NewValue("value-from-task1") + param1 := &apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{ + Value: val1, + ParameterKey: "output-param", + Type: apiv2beta1.IOType_OUTPUT, + Producer: &apiv2beta1.IOProducer{ + TaskName: "task1", }, - { - "valid -task 2", - []*model.Task{ - { - Namespace: "ns2", - PipelineName: "namespace/ns2/pipeline/pipeline2", - PodName: "pod5", - RunID: defaultFakeRunIdTwo, - MLMDExecutionID: "5", - StartedTimestamp: 7, - FinishedTimestamp: 8, - Fingerprint: "10", - }, - }, - []*model.Task{ - { - UUID: defaultFakeTaskIdFive, - CreatedTimestamp: 7, - Namespace: "ns2", - PipelineName: "namespace/ns2/pipeline/pipeline2", - PodName: "pod5", - RunID: defaultFakeRunIdTwo, - MLMDExecutionID: "5", - StartedTimestamp: 7, - FinishedTimestamp: 8, - State: model.RuntimeStateUnspecified, - Fingerprint: "10", - }, - }, - false, - "", + } + + val2, _ := structpb.NewValue("value-from-task2") + param2 := &apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{ + Value: val2, + ParameterKey: "output-param", + Type: apiv2beta1.IOType_OUTPUT, + Producer: &apiv2beta1.IOProducer{ + TaskName: "task2", }, - { - "non-existing", - []*model.Task{ - { - Namespace: "ns2", - PipelineName: "namespace/ns2/pipeline/pipeline2", - PodName: "pod99", - RunID: defaultFakeRunIdTwo, - MLMDExecutionID: "5", - StartedTimestamp: 7, - FinishedTimestamp: 8, - Fingerprint: "10", - }, - }, - []*model.Task{ - { - Namespace: "ns2", - PipelineName: "namespace/ns2/pipeline/pipeline2", - PodName: "pod99", - RunID: defaultFakeRunIdTwo, - MLMDExecutionID: "5", - StartedTimestamp: 7, - FinishedTimestamp: 8, - Fingerprint: "10", - }, - }, - false, - "", + } + + oldParams, err := model.ProtoSliceToJSONSlice([]*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{param1}) + assert.NoError(t, err) + newParams, err := model.ProtoSliceToJSONSlice([]*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{param2}) + assert.NoError(t, err) + + result, err := mergeParameters(oldParams, newParams) + assert.NoError(t, err) + // Different task names create different keys, so we should have 2 parameters + assert.Equal(t, 2, len(result)) + + typeFunc := func() *apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter { + return &apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{} + } + resultProtos, err := model.JSONSliceToProtoSlice(result, typeFunc) + assert.NoError(t, err) + + taskNames := make(map[string]bool) + for _, p := range resultProtos { + taskNames[p.Producer.TaskName] = true + } + assert.True(t, taskNames["task1"]) + assert.True(t, taskNames["task2"]) +} + +func TestMergeParameters_WithProducer_WithIteration(t *testing.T) { + // Create parameters with producer including iteration + val1, _ := structpb.NewValue("value-iteration-0") + param1 := &apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{ + Value: val1, + ParameterKey: "loop-output", + Type: apiv2beta1.IOType_ITERATOR_OUTPUT, + Producer: &apiv2beta1.IOProducer{ + TaskName: "loop-task", + Iteration: int64PTR(0), }, - { - "empty", - []*model.Task{}, - []*model.Task{}, - false, - "", + } + + val2, _ := structpb.NewValue("value-iteration-1") + param2 := &apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{ + Value: val2, + ParameterKey: "loop-output", + 
Type: apiv2beta1.IOType_ITERATOR_OUTPUT, + Producer: &apiv2beta1.IOProducer{ + TaskName: "loop-task", + Iteration: int64PTR(1), }, - { - "duplicate", - []*model.Task{ - { - PodName: "pod4", - Namespace: "ns2", - PipelineName: "namespace/ns2/pipeline/pipeline2", - RunID: defaultFakeRunIdTwo, - MLMDExecutionID: "4", - StartedTimestamp: 5, - FinishedTimestamp: 6, - Fingerprint: "1", - }, - { - PodName: "pod4", - Namespace: "ns2", - PipelineName: "namespace/ns2/pipeline/pipeline2", - RunID: defaultFakeRunIdTwo, - MLMDExecutionID: "4", - StartedTimestamp: 5, - FinishedTimestamp: 6, - Fingerprint: "1", - }, - { - Namespace: "ns2", - PipelineName: "namespace/ns2/pipeline/pipeline2", - PodName: "pod5", - RunID: defaultFakeRunIdTwo, - State: model.RuntimeStatePaused, - MLMDExecutionID: "5", - StartedTimestamp: 7, - FinishedTimestamp: 8, - Fingerprint: "10", - }, - { - Namespace: "ns2", - PipelineName: "namespace/ns2/pipeline/pipeline2", - PodName: "pod99", - RunID: defaultFakeRunIdTwo, - MLMDExecutionID: "5", - State: model.RuntimeStateCancelling, - StartedTimestamp: 7, - FinishedTimestamp: 8, - Fingerprint: "10", - }, - }, - []*model.Task{ - { - UUID: defaultFakeTaskIdFour, - CreatedTimestamp: 5, - PodName: "pod4", - Namespace: "ns2", - PipelineName: "namespace/ns2/pipeline/pipeline2", - RunID: defaultFakeRunIdTwo, - MLMDExecutionID: "4", - State: model.RuntimeStateUnspecified, - StartedTimestamp: 5, - FinishedTimestamp: 6, - Fingerprint: "1", - }, - { - UUID: defaultFakeTaskIdFour, - CreatedTimestamp: 5, - PodName: "pod4", - Namespace: "ns2", - State: model.RuntimeStateUnspecified, - PipelineName: "namespace/ns2/pipeline/pipeline2", - RunID: defaultFakeRunIdTwo, - MLMDExecutionID: "4", - StartedTimestamp: 5, - FinishedTimestamp: 6, - Fingerprint: "1", - }, - { - UUID: defaultFakeTaskIdFive, - CreatedTimestamp: 7, - Namespace: "ns2", - PipelineName: "namespace/ns2/pipeline/pipeline2", - PodName: "pod5", - State: model.RuntimeStatePaused, - RunID: defaultFakeRunIdTwo, - MLMDExecutionID: "5", - StartedTimestamp: 7, - FinishedTimestamp: 8, - Fingerprint: "10", - }, - { - Namespace: "ns2", - PipelineName: "namespace/ns2/pipeline/pipeline2", - PodName: "pod99", - RunID: defaultFakeRunIdTwo, - State: model.RuntimeStateCancelling, - MLMDExecutionID: "5", - StartedTimestamp: 7, - FinishedTimestamp: 8, - Fingerprint: "10", - }, - }, - false, - "", + } + + oldParams, err := model.ProtoSliceToJSONSlice([]*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{param1}) + assert.NoError(t, err) + newParams, err := model.ProtoSliceToJSONSlice([]*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{param2}) + assert.NoError(t, err) + + result, err := mergeParameters(oldParams, newParams) + assert.NoError(t, err) + // Different iterations create different keys, so we should have 2 parameters + assert.Equal(t, 2, len(result)) + + typeFunc := func() *apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter { + return &apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{} + } + resultProtos, err := model.JSONSliceToProtoSlice(result, typeFunc) + assert.NoError(t, err) + + iterations := make(map[int64]bool) + for _, p := range resultProtos { + if p.Producer != nil && p.Producer.Iteration != nil { + iterations[*p.Producer.Iteration] = true + } + } + assert.True(t, iterations[0]) + assert.True(t, iterations[1]) +} + +func TestMergeParameters_RaceConditionScenario(t *testing.T) { + // Simulate a race condition where two driver tasks from different iterations + // within a loop try to update parameters simultaneously 
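+	// mergeParameters is expected to key entries by parameter key plus producer
+	// task name and iteration, so per-iteration updates accumulate rather than
+	// overwrite one another.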
+ + // Initial state - task already has some parameters + valExisting, _ := structpb.NewValue("existing-param") + existingParam := &apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{ + Value: valExisting, + ParameterKey: "common-param", + Type: apiv2beta1.IOType_COMPONENT_INPUT, + } + + // Update from iteration 0 + valIter0, _ := structpb.NewValue("output-from-iter-0") + iter0Param := &apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{ + Value: valIter0, + ParameterKey: "loop-output", + Type: apiv2beta1.IOType_ITERATOR_OUTPUT, + Producer: &apiv2beta1.IOProducer{ + TaskName: "loop-task", + Iteration: int64PTR(0), }, } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - err := taskStore.patchWithExistingTasks(tt.tasks) - if tt.wantErr { - assert.NotNil(t, err) - assert.Contains(t, err.Error(), tt.errMsg) - } else { - assert.Nil(t, err) - assert.Equal(t, tt.want, tt.tasks) + + // Update from iteration 1 (happening concurrently) + valIter1, _ := structpb.NewValue("output-from-iter-1") + iter1Param := &apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{ + Value: valIter1, + ParameterKey: "loop-output", + Type: apiv2beta1.IOType_ITERATOR_OUTPUT, + Producer: &apiv2beta1.IOProducer{ + TaskName: "loop-task", + Iteration: int64PTR(1), + }, + } + + existingParams, err := model.ProtoSliceToJSONSlice([]*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{existingParam}) + assert.NoError(t, err) + + // First update: merge existing with iteration 0 + iter0Update, err := model.ProtoSliceToJSONSlice([]*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{iter0Param}) + assert.NoError(t, err) + + result1, err := mergeParameters(existingParams, iter0Update) + assert.NoError(t, err) + assert.Equal(t, 2, len(result1)) + + // Second update: merge result1 with iteration 1 + iter1Update, err := model.ProtoSliceToJSONSlice([]*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{iter1Param}) + assert.NoError(t, err) + + result2, err := mergeParameters(result1, iter1Update) + assert.NoError(t, err) + // Should have 3 parameters: existing + iter0 + iter1 + assert.Equal(t, 3, len(result2)) + + // Verify all three parameters are present + typeFunc := func() *apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter { + return &apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{} + } + resultProtos, err := model.JSONSliceToProtoSlice(result2, typeFunc) + assert.NoError(t, err) + + hasExisting := false + hasIter0 := false + hasIter1 := false + + for _, p := range resultProtos { + if p.ParameterKey == "common-param" && p.Producer == nil { + hasExisting = true + } + if p.Producer != nil && p.Producer.Iteration != nil { + if *p.Producer.Iteration == 0 { + hasIter0 = true } - }) + if *p.Producer.Iteration == 1 { + hasIter1 = true + } + } } + + assert.True(t, hasExisting, "Should preserve existing parameter") + assert.True(t, hasIter0, "Should preserve iteration 0 parameter") + assert.True(t, hasIter1, "Should preserve iteration 1 parameter") } -func TestTaskStore_UpdateOrCreateTasks(t *testing.T) { - db, taskStore := initializeTaskStore() +// TestCreateTask_AutoPopulatesStateHistory verifies that state_history is automatically +// populated when creating a task with a state (mirrors Run behavior). 
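+// A freshly created task should report exactly one history entry matching its
+// initial state, both on the returned model and after a round trip through GetTask.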
+func TestCreateTask_AutoPopulatesStateHistory(t *testing.T) { + db, taskStore, _ := initializeTaskStore() defer db.Close() - // These are existing tasks created inside initializeTaskStore() - task1 := &model.Task{ - PodName: "pod4", - Namespace: "ns2", - PipelineName: "namespace/ns2/pipeline/pipeline2", - RunID: defaultFakeRunIdTwo, - MLMDExecutionID: "4", - StartedTimestamp: 5, - FinishedTimestamp: 6, - Fingerprint: "1", - } - task2 := &model.Task{ - Namespace: "ns2", - PipelineName: "namespace/ns2/pipeline/pipeline2", - PodName: "pod5", - RunID: defaultFakeRunIdTwo, - MLMDExecutionID: "5", - StartedTimestamp: 7, - FinishedTimestamp: 8, - Fingerprint: "10", - } - - // This is a new task - task3 := &model.Task{ - Namespace: "ns2", - PipelineName: "namespace/ns2/pipeline/pipeline2", - PodName: "pod99", - RunID: defaultFakeRunIdTwo, - MLMDExecutionID: "5", - StartedTimestamp: 3, - FinishedTimestamp: 4, - Fingerprint: "10", - } - - // Expected results - want1 := &model.Task{ - UUID: defaultFakeTaskIdFour, - CreatedTimestamp: 5, - PodName: "pod4", - Namespace: "ns2", - PipelineName: "namespace/ns2/pipeline/pipeline2", - RunID: defaultFakeRunIdTwo, - MLMDExecutionID: "4", - StartedTimestamp: 5, - FinishedTimestamp: 6, - Fingerprint: "1", - State: model.RuntimeStateUnspecified, - StateHistory: []*model.RuntimeStatus{{UpdateTimeInSec: 1, State: model.RuntimeStateUnspecified}}, - } - want2 := &model.Task{ - UUID: defaultFakeTaskIdFive, - CreatedTimestamp: 7, - Namespace: "ns2", - PipelineName: "namespace/ns2/pipeline/pipeline2", - PodName: "pod5", - RunID: defaultFakeRunIdTwo, - MLMDExecutionID: "5", - StartedTimestamp: 7, - FinishedTimestamp: 8, - Fingerprint: "10", - State: model.RuntimeStateUnspecified, - StateHistory: []*model.RuntimeStatus{{UpdateTimeInSec: 2, State: model.RuntimeStateUnspecified}}, - } - want3 := &model.Task{ - UUID: defaultFakeTaskIdSix, - CreatedTimestamp: 3, - Namespace: "ns2", - PipelineName: "namespace/ns2/pipeline/pipeline2", - PodName: "pod99", - RunID: defaultFakeRunIdTwo, - MLMDExecutionID: "5", - StartedTimestamp: 3, - FinishedTimestamp: 4, - Fingerprint: "10", - State: model.RuntimeStateUnspecified, - StateHistory: []*model.RuntimeStatus{{UpdateTimeInSec: 4, State: model.RuntimeStateUnspecified}}, - } - - tests := []struct { - name string - tasks []*model.Task - want []*model.Task - }{ - { - "valid - three tasks", - []*model.Task{task1, task2, task3}, - []*model.Task{want1, want2, want3}, - }, + pods := createTaskPodsAsJSONSlice(createTaskPod("p1", "uid1", apiv2beta1.PipelineTaskDetail_EXECUTOR)) + task := &model.Task{ + Namespace: "ns1", + RunUUID: "run-1", + Pods: pods, + Fingerprint: "fp-1", + Name: "taskA", + State: model.TaskStatus(apiv2beta1.PipelineTaskDetail_RUNNING), + StateHistory: model.JSONSlice{}, // Empty state history + InputParameters: model.JSONSlice{}, + OutputParameters: model.JSONSlice{}, + Type: 0, + TypeAttrs: model.JSONData(map[string]interface{}{"k": "v"}), } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - got, err := taskStore.CreateOrUpdateTasks(tt.tasks) - assert.Nil(t, err) - assert.Equal(t, tt.want, got) - }) + + created, err := taskStore.CreateTask(task) + assert.NoError(t, err) + + // Verify state_history was auto-populated with initial state + assert.NotNil(t, created.StateHistory) + assert.Equal(t, 1, len(created.StateHistory), "Should have exactly 1 state history entry") + + // Fetch task from DB to verify persistence + fetched, err := taskStore.GetTask(created.UUID) + assert.NoError(t, err) + assert.Equal(t, 
1, len(fetched.StateHistory), "Fetched task should have 1 state history entry") + + // Convert and verify state + typeFunc := func() *apiv2beta1.PipelineTaskDetail_TaskStatus { + return &apiv2beta1.PipelineTaskDetail_TaskStatus{} } + histProtos, err := model.JSONSliceToProtoSlice(fetched.StateHistory, typeFunc) + assert.NoError(t, err) + assert.Equal(t, 1, len(histProtos)) + assert.Equal(t, apiv2beta1.PipelineTaskDetail_RUNNING, histProtos[0].GetState()) + assert.NotNil(t, histProtos[0].GetUpdateTime()) + assert.Greater(t, histProtos[0].GetUpdateTime().GetSeconds(), int64(0)) } -func TestTaskAPIFieldMap(t *testing.T) { - for _, modelField := range (&model.Task{}).APIToModelFieldMap() { - assert.Contains(t, taskColumns, modelField) +// TestUpdateTask_AutoPopulatesStateHistory verifies that state transitions +// are automatically tracked in state_history when updating a task. +func TestUpdateTask_AutoPopulatesStateHistory(t *testing.T) { + db, taskStore, _ := initializeTaskStore() + defer db.Close() + + // Create initial task in RUNNING state + pods := createTaskPodsAsJSONSlice(createTaskPod("p1", "uid1", apiv2beta1.PipelineTaskDetail_EXECUTOR)) + taskStore.uuid = util.NewFakeUUIDGeneratorOrFatal(testUUID1, nil) + created, err := taskStore.CreateTask(&model.Task{ + Namespace: "ns1", + RunUUID: "run-1", + Pods: pods, + Fingerprint: "fp-0", + State: model.TaskStatus(apiv2beta1.PipelineTaskDetail_RUNNING), + StateHistory: model.JSONSlice{}, + InputParameters: model.JSONSlice{}, + OutputParameters: model.JSONSlice{}, + Type: 0, + TypeAttrs: map[string]interface{}{}, + }) + assert.NoError(t, err) + assert.Equal(t, 1, len(created.StateHistory), "Should have 1 entry after creation") + + // Update to SUCCEEDED state + update := &model.Task{ + UUID: created.UUID, + State: model.TaskStatus(apiv2beta1.PipelineTaskDetail_SUCCEEDED), + } + updated, err := taskStore.UpdateTask(update) + assert.NoError(t, err) + + // Should now have 2 entries: RUNNING and SUCCEEDED + assert.Equal(t, 2, len(updated.StateHistory), "Should have 2 state history entries after state change") + + // Verify states in order + typeFunc := func() *apiv2beta1.PipelineTaskDetail_TaskStatus { + return &apiv2beta1.PipelineTaskDetail_TaskStatus{} + } + histProtos, err := model.JSONSliceToProtoSlice(updated.StateHistory, typeFunc) + assert.NoError(t, err) + assert.Equal(t, apiv2beta1.PipelineTaskDetail_RUNNING, histProtos[0].GetState()) + assert.Equal(t, apiv2beta1.PipelineTaskDetail_SUCCEEDED, histProtos[1].GetState()) + assert.Greater(t, histProtos[1].GetUpdateTime().GetSeconds(), histProtos[0].GetUpdateTime().GetSeconds(), + "Second state timestamp should be after first") +} + +// TestUpdateTask_StateHistory_MultipleTransitions verifies that multiple +// state transitions are all captured in history. 
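+// Each UpdateTask call that changes State should append exactly one entry,
+// preserving the RUNNING → SUCCEEDED → FAILED sequence in order.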
+func TestUpdateTask_StateHistory_MultipleTransitions(t *testing.T) {
+	db, taskStore, _ := initializeTaskStore()
+	defer db.Close()
+
+	pods := createTaskPodsAsJSONSlice(createTaskPod("p1", "uid1", apiv2beta1.PipelineTaskDetail_EXECUTOR))
+	taskStore.uuid = util.NewFakeUUIDGeneratorOrFatal(testUUID1, nil)
+	created, err := taskStore.CreateTask(&model.Task{
+		Namespace:        "ns1",
+		RunUUID:          "run-1",
+		Pods:             pods,
+		Fingerprint:      "fp-0",
+		State:            model.TaskStatus(apiv2beta1.PipelineTaskDetail_RUNNING),
+		StateHistory:     model.JSONSlice{},
+		InputParameters:  model.JSONSlice{},
+		OutputParameters: model.JSONSlice{},
+		Type:             0,
+		TypeAttrs:        map[string]interface{}{},
+	})
+	assert.NoError(t, err)
+
+	// Transition: RUNNING → SUCCEEDED
+	update1 := &model.Task{
+		UUID:  created.UUID,
+		State: model.TaskStatus(apiv2beta1.PipelineTaskDetail_SUCCEEDED),
+	}
+	updated1, err := taskStore.UpdateTask(update1)
+	assert.NoError(t, err)
+	assert.Equal(t, 2, len(updated1.StateHistory))
+
+	// Transition: SUCCEEDED → FAILED (hypothetical retry scenario)
+	update2 := &model.Task{
+		UUID:  created.UUID,
+		State: model.TaskStatus(apiv2beta1.PipelineTaskDetail_FAILED),
+	}
+	updated2, err := taskStore.UpdateTask(update2)
+	assert.NoError(t, err)
+	assert.Equal(t, 3, len(updated2.StateHistory))
+
+	// Verify all states are preserved
+	typeFunc := func() *apiv2beta1.PipelineTaskDetail_TaskStatus {
+		return &apiv2beta1.PipelineTaskDetail_TaskStatus{}
+	}
+	histProtos, err := model.JSONSliceToProtoSlice(updated2.StateHistory, typeFunc)
+	assert.NoError(t, err)
+	assert.Equal(t, apiv2beta1.PipelineTaskDetail_RUNNING, histProtos[0].GetState())
+	assert.Equal(t, apiv2beta1.PipelineTaskDetail_SUCCEEDED, histProtos[1].GetState())
+	assert.Equal(t, apiv2beta1.PipelineTaskDetail_FAILED, histProtos[2].GetState())
+}
+
+// Test helper function getLastTaskState
+func TestGetLastTaskState(t *testing.T) {
+	// Empty history
+	assert.Equal(t, model.TaskStatus(0), getLastTaskState(model.JSONSlice{}))
+	assert.Equal(t, model.TaskStatus(0), getLastTaskState(nil))
+
+	// Valid history with one entry
+	status1 := &apiv2beta1.PipelineTaskDetail_TaskStatus{
+		UpdateTime: &timestamppb.Timestamp{Seconds: 100},
+		State:      apiv2beta1.PipelineTaskDetail_RUNNING,
+	}
+	history1, err := model.ProtoSliceToJSONSlice([]*apiv2beta1.PipelineTaskDetail_TaskStatus{status1})
+	assert.NoError(t, err)
+	assert.Equal(t, model.TaskStatus(apiv2beta1.PipelineTaskDetail_RUNNING), getLastTaskState(history1))
+
+	// Valid history with multiple entries
+	status2 := &apiv2beta1.PipelineTaskDetail_TaskStatus{
+		UpdateTime: &timestamppb.Timestamp{Seconds: 200},
+		State:      apiv2beta1.PipelineTaskDetail_SUCCEEDED,
+	}
+	history2, err := model.ProtoSliceToJSONSlice([]*apiv2beta1.PipelineTaskDetail_TaskStatus{status1, status2})
+	assert.NoError(t, err)
+	assert.Equal(t, model.TaskStatus(apiv2beta1.PipelineTaskDetail_SUCCEEDED), getLastTaskState(history2))
+}
diff --git a/backend/src/apiserver/validation/length.go b/backend/src/apiserver/validation/length.go
index d793a69d813..75be3521240 100644
--- a/backend/src/apiserver/validation/length.go
+++ b/backend/src/apiserver/validation/length.go
@@ -59,9 +59,9 @@ var LengthSpecs = []ColLenSpec{
 	{Model: &model.Run{}, Field: "ExperimentId", Max: 64},
 	{Model: &model.Run{}, Field: "Conditions", Max: 125},
-	{Model: &model.RunMetric{}, Field: "RunUUID", Max: 191},
-	{Model: &model.RunMetric{}, Field: "NodeID", Max: 191},
-	{Model: &model.RunMetric{}, Field: "Name", Max: 191},
+	{Model: &model.RunMetricV1{}, Field: "Name", Max: 191},
+	{Model: 
&model.RunMetricV1{}, Field: "NodeID", Max: 191}, + {Model: &model.RunMetricV1{}, Field: "RunUUID", Max: 191}, {Model: &model.Task{}, Field: "UUID", Max: 191}, // Note: struct field is RunID, column is RunUUID. diff --git a/backend/src/common/client/api_server/v2/pipeline_client.go b/backend/src/common/client/api_server/v2/pipeline_client.go index c5e8770ee04..637dae95b3a 100644 --- a/backend/src/common/client/api_server/v2/pipeline_client.go +++ b/backend/src/common/client/api_server/v2/pipeline_client.go @@ -17,6 +17,7 @@ package api_server_v2 import ( "crypto/tls" "fmt" + "github.com/go-openapi/runtime" "github.com/go-openapi/strfmt" apiclient "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/pipeline_client" diff --git a/backend/src/common/client/api_server/v2/pipeline_upload_client_kubernetes.go b/backend/src/common/client/api_server/v2/pipeline_upload_client_kubernetes.go index eb60da3e46e..5a7db247bcb 100644 --- a/backend/src/common/client/api_server/v2/pipeline_upload_client_kubernetes.go +++ b/backend/src/common/client/api_server/v2/pipeline_upload_client_kubernetes.go @@ -181,7 +181,7 @@ func (c *PipelineUploadClientKubernetes) Upload(parameters *params.UploadPipelin Description: pipeline.Spec.Description, DisplayName: pipeline.Spec.DisplayName, Name: pipeline.Name, - PipelineID: string(pipeline.ObjectMeta.UID), + PipelineID: string(pipeline.UID), Namespace: pipeline.Namespace, } @@ -226,7 +226,7 @@ func (c *PipelineUploadClientKubernetes) UploadPipelineVersion(filePath string, var pipeline *k8sapi.Pipeline for _, listedPipeline := range pipelineList.Items { - if string(listedPipeline.ObjectMeta.UID) == *parameters.Pipelineid { + if string(listedPipeline.UID) == *parameters.Pipelineid { pipeline = &listedPipeline break } diff --git a/backend/src/common/util/artifact.go b/backend/src/common/util/artifact.go new file mode 100644 index 00000000000..3793d9a658c --- /dev/null +++ b/backend/src/common/util/artifact.go @@ -0,0 +1,43 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package util + +import ( + "fmt" + "path" + "strings" + + "github.com/golang/glog" +) + +// GenerateOutputURI appends the specified paths to the pipeline root. +// It may be configured to preserve the query part of the pipeline root +// by splitting it off and appending it back to the full URI. +func GenerateOutputURI(pipelineRoot string, paths []string, preserveQueryString bool) string { + querySplit := strings.Split(pipelineRoot, "?") + query := "" + if len(querySplit) == 2 { + pipelineRoot = querySplit[0] + if preserveQueryString { + query = "?" + querySplit[1] + } + } else if len(querySplit) > 2 { + // this should never happen, but just in case. 
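+		// A root with more than one "?" is malformed; warn and use it unchanged.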
+ glog.Warningf("Unexpected pipeline root: %v", pipelineRoot) + } + // we cannot path.Join(root, taskName, artifactName), because root + // contains scheme like gs:// and path.Join cleans up scheme to gs:/ + return fmt.Sprintf("%s/%s%s", strings.TrimRight(pipelineRoot, "/"), path.Join(paths...), query) +} diff --git a/backend/src/common/util/pointer.go b/backend/src/common/util/pointer.go index 0737118dcea..9b199281dd3 100644 --- a/backend/src/common/util/pointer.go +++ b/backend/src/common/util/pointer.go @@ -55,6 +55,10 @@ func Int32Pointer(i int32) *int32 { return &i } +func IntPointer(i int) *int { + return &i +} + func StringNilOrValue(s *string) string { if s == nil { return "" diff --git a/backend/src/common/util/proto_helpers.go b/backend/src/common/util/proto_helpers.go new file mode 100644 index 00000000000..9491a66338f --- /dev/null +++ b/backend/src/common/util/proto_helpers.go @@ -0,0 +1,58 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package util + +import ( + "encoding/json" + "fmt" + "strconv" + + "google.golang.org/protobuf/types/known/structpb" +) + +func PBValueToText(v *structpb.Value) (string, error) { + wrap := func(err error) error { + return fmt.Errorf("failed to convert protobuf.Value to text: %w", err) + } + if v == nil { + return "", nil + } + var text string + switch t := v.Kind.(type) { + case *structpb.Value_NullValue: + text = "" + case *structpb.Value_StringValue: + text = v.GetStringValue() + case *structpb.Value_NumberValue: + text = strconv.FormatFloat(v.GetNumberValue(), 'f', -1, 64) + case *structpb.Value_BoolValue: + text = strconv.FormatBool(v.GetBoolValue()) + case *structpb.Value_ListValue: + b, err := json.Marshal(v.GetListValue()) + if err != nil { + return "", wrap(fmt.Errorf("failed to JSON-marshal a list: %w", err)) + } + text = string(b) + case *structpb.Value_StructValue: + b, err := json.Marshal(v.GetStructValue()) + if err != nil { + return "", wrap(fmt.Errorf("failed to JSON-marshal a struct: %w", err)) + } + text = string(b) + default: + return "", wrap(fmt.Errorf("unknown type %T", t)) + } + return text, nil +} diff --git a/backend/src/common/util/scope_path.go b/backend/src/common/util/scope_path.go new file mode 100644 index 00000000000..da42d441be8 --- /dev/null +++ b/backend/src/common/util/scope_path.go @@ -0,0 +1,255 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package util + +// ScopePath provides hierarchical navigation through a pipeline's DAG (Directed Acyclic Graph) structure. +// It maintains the execution context by tracking a path from the root component through nested tasks, +// storing each task's name, task spec, and component spec along the way. This allows the pipeline +// runtime to resolve inputs/outputs and understand the current position within nested DAG components. +// +// The path is implemented as a linked list, starting with "root" and growing as tasks are pushed onto it. +// It supports stack-like operations (Push/Pop) for traversing into and out of nested components. + +import ( + "fmt" + + "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" + "google.golang.org/protobuf/encoding/protojson" + "google.golang.org/protobuf/types/known/structpb" +) + +type ScopePath struct { + list *LinkedList[ScopePathEntry] + pipelineSpec *pipelinespec.PipelineSpec + pipelineSpecStruct *structpb.Struct + size int +} +type ScopePathEntry struct { + taskName string + taskSpec *pipelinespec.PipelineTaskSpec + componentSpec *pipelinespec.ComponentSpec +} + +func (e *ScopePathEntry) GetTaskSpec() *pipelinespec.PipelineTaskSpec { + return e.taskSpec +} + +func (e *ScopePathEntry) GetComponentSpec() *pipelinespec.ComponentSpec { + return e.componentSpec +} + +func newScopePath( + pipelineSpec *pipelinespec.PipelineSpec, + pipelineSpecStruct *structpb.Struct, +) ScopePath { + return ScopePath{ + pipelineSpec: pipelineSpec, + pipelineSpecStruct: pipelineSpecStruct, + } +} + +func NewScopePathFromStruct(spec *structpb.Struct) (ScopePath, error) { + pipelineSpec := &pipelinespec.PipelineSpec{} + // Convert struct to JSON + b, err := spec.MarshalJSON() + if err != nil { + return ScopePath{}, fmt.Errorf("failed to marshal spec to JSON: %w", err) + } + // Unmarshal JSON to PipelineSpec + if err := protojson.Unmarshal(b, pipelineSpec); err != nil { + return ScopePath{}, fmt.Errorf("failed to unmarshal spec: %w", err) + } + return newScopePath(pipelineSpec, spec), nil +} + +// ScopePathFromStringPathWithNewTask builds a ScopePath from a string path and pushes the newTask to the end of the path. +func ScopePathFromStringPathWithNewTask(rawPipelineSpec *structpb.Struct, path []string, newTask string) (ScopePath, error) { + if rawPipelineSpec == nil { + return ScopePath{}, fmt.Errorf("PipelineSpec is nil") + } + scopePath, err := ScopePathFromStringPath(rawPipelineSpec, path) + if err != nil { + return ScopePath{}, fmt.Errorf("failed to build scope path: %w", err) + } + // Update scope path to current context + err = scopePath.Push(newTask) + if err != nil { + return ScopePath{}, err + } + return scopePath, nil +} + +// ScopePathFromStringPath builds a ScopePath from a string path. 
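+// Entries are pushed in order, so path must start with "root" and each later
+// element must name a task in the enclosing DAG. For example (illustrative only):
+//
+//	sp, err := ScopePathFromStringPath(spec, []string{"root", "my-dag", "my-task"})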
+func ScopePathFromStringPath(rawPipelineSpec *structpb.Struct, path []string) (ScopePath, error) {
+	if rawPipelineSpec == nil {
+		return ScopePath{}, fmt.Errorf("PipelineSpec is nil")
+	}
+	scopePath, err := NewScopePathFromStruct(rawPipelineSpec)
+	if err != nil {
+		return ScopePath{}, fmt.Errorf("failed to build scope path: %w", err)
+	}
+	for _, taskName := range path {
+		if err := scopePath.Push(taskName); err != nil {
+			return ScopePath{}, fmt.Errorf("failed to build scope path at task %q: %w", taskName, err)
+		}
+	}
+	return scopePath, nil
+}
+
+func (s *ScopePath) Push(taskName string) error {
+	if s.list == nil {
+		s.list = &LinkedList[ScopePathEntry]{}
+	}
+	if taskName == "root" {
+		sp := ScopePathEntry{
+			taskName:      taskName,
+			componentSpec: s.pipelineSpec.Root,
+		}
+		s.list.append(sp)
+		s.size++
+		return nil
+	}
+	if s.list.head == nil {
+		return fmt.Errorf("scope path is empty, first task should be root")
+	}
+	lastTask := s.GetLast()
+	if lastTask == nil {
+		return fmt.Errorf("last task is nil")
+	}
+	// Check the component we are descending into, not the root: only DAG
+	// components have child tasks to push.
+	if lastTask.componentSpec.GetDag() == nil {
+		return fmt.Errorf("this component is not a DAG component")
+	}
+	if _, ok := lastTask.componentSpec.GetDag().Tasks[taskName]; !ok {
+		return fmt.Errorf("task %s not found", taskName)
+	}
+	taskSpec := lastTask.componentSpec.GetDag().Tasks[taskName]
+	if _, ok := s.pipelineSpec.Components[taskSpec.GetComponentRef().GetName()]; !ok {
+		return fmt.Errorf("component %s not found", taskSpec.GetComponentRef().GetName())
+	}
+	componentSpec := s.pipelineSpec.Components[taskSpec.GetComponentRef().GetName()]
+	sp := ScopePathEntry{
+		taskName:      taskName,
+		taskSpec:      taskSpec,
+		componentSpec: componentSpec,
+	}
+	s.list.append(sp)
+	s.size++
+	return nil
+}
+
+func (s *ScopePath) Pop() (ScopePathEntry, bool) {
+	if s.list == nil {
+		return ScopePathEntry{}, false
+	}
+	entry, ok := s.list.pop()
+	if ok {
+		s.size--
+	}
+	return entry, ok
+}
+
+func (s *ScopePath) GetRoot() *ScopePathEntry {
+	if s.list == nil || s.list.head == nil {
+		return nil
+	}
+	return &s.list.head.Value
+}
+
+func (s *ScopePath) GetLast() *ScopePathEntry {
+	if s.list == nil {
+		return nil
+	}
+	spe, ok := s.list.last()
+	if !ok {
+		return nil
+	}
+	return &spe
+}
+
+func (s *ScopePath) GetSize() int {
+	return s.size
+}
+
+func (s *ScopePath) GetPipelineSpec() *pipelinespec.PipelineSpec {
+	return s.pipelineSpec
+}
+
+func (s *ScopePath) GetPipelineSpecStruct() *structpb.Struct {
+	return s.pipelineSpecStruct
+}
+
+func (s *ScopePath) StringPath() []string {
+	var path []string
+	if s.list == nil {
+		return path
+	}
+	for n := s.list.head; n != nil; n = n.Next {
+		path = append(path, n.Value.taskName)
+	}
+	return path
+}
+
+// Node represents one element in the list.
+type Node[T any] struct {
+	Value T
+	Next  *Node[T]
+}
+
+// LinkedList is a simple singly linked list.
+type LinkedList[T any] struct {
+	head *Node[T]
+}
+
+// append adds a new node to the end of the list.
+func (l *LinkedList[T]) append(v T) {
+	newNode := &Node[T]{Value: v}
+	if l.head == nil {
+		l.head = newNode
+		return
+	}
+	curr := l.head
+	for curr.Next != nil {
+		curr = curr.Next
+	}
+	curr.Next = newNode
+}
+
+// pop removes and returns the last element.
+// Returns (zeroValue, false) if list is empty.
+func (l *LinkedList[T]) pop() (T, bool) { + var zero T + if l.head == nil { + return zero, false + } + // Single element case + if l.head.Next == nil { + val := l.head.Value + l.head = nil + return val, true + } + // Traverse to second-last node + curr := l.head + for curr.Next.Next != nil { + curr = curr.Next + } + val := curr.Next.Value + curr.Next = nil + return val, true +} + +// last returns the value of the last node without removing it. +// Returns (zeroValue, false) if list is empty. +func (l *LinkedList[T]) last() (T, bool) { + var zero T + if l.head == nil { + return zero, false + } + curr := l.head + for curr.Next != nil { + curr = curr.Next + } + return curr.Value, true +} diff --git a/backend/src/common/util/scope_path_test.go b/backend/src/common/util/scope_path_test.go new file mode 100644 index 00000000000..b49c60b86d6 --- /dev/null +++ b/backend/src/common/util/scope_path_test.go @@ -0,0 +1,130 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package util + +import ( + "testing" + + "github.com/stretchr/testify/require" + "google.golang.org/protobuf/encoding/protojson" + "google.golang.org/protobuf/types/known/structpb" +) + +func TestScopePath(t *testing.T) { + // Load pipeline spec + pipelineSpec, _, err := LoadPipelineAndPlatformSpec("../../v2/driver/test_data/loop_collected_raw_Iterator.py.yaml") + require.NoError(t, err) + require.NotNil(t, pipelineSpec) + + // Convert PipelineSpec to Struct + b, err := protojson.Marshal(pipelineSpec) + require.NoError(t, err) + var pipelineSpecStruct structpb.Struct + err = protojson.Unmarshal(b, &pipelineSpecStruct) + require.NoError(t, err) + + scopePath, err := NewScopePathFromStruct(&pipelineSpecStruct) + require.NoError(t, err) + require.NotNil(t, scopePath) + + require.Empty(t, scopePath.StringPath()) + + err = scopePath.Push("not-root") + require.Error(t, err) + + require.Equal(t, 0, scopePath.GetSize()) + + err = scopePath.Push("root") + require.NoError(t, err) + require.NotNil(t, scopePath) + require.Equal(t, 1, scopePath.GetSize()) + + head := scopePath.GetRoot() + last := scopePath.GetLast() + require.Equal(t, head, last) + require.NotNil(t, head) + require.NotNil(t, head.GetComponentSpec()) + require.Nil(t, head.GetTaskSpec()) + require.Equal(t, 2, len(head.componentSpec.GetDag().GetTasks())) + require.NotNil(t, head.componentSpec.GetDag().GetTasks()["analyze-artifact-list"]) + + err = scopePath.Push("secondary-pipeline") + last = scopePath.GetLast() + require.NotEqual(t, head, last) + require.NoError(t, err) + require.NotNil(t, last.GetComponentSpec()) + require.NotNil(t, last.GetTaskSpec()) + + err = scopePath.Push("does-not-exist") + require.Error(t, err) + + err = scopePath.Push("for-loop-2") + require.NoError(t, err) + last = scopePath.GetLast() + require.Equal(t, last.GetTaskSpec().GetTaskInfo().GetName(), "for-loop-2") + require.Len(t, last.GetComponentSpec().GetDag().GetTasks(), 2) + + require.Equal(t, []string{"root", "secondary-pipeline", "for-loop-2"}, 
scopePath.StringPath()) + require.Equal(t, 3, scopePath.GetSize()) + + spe, ok := scopePath.Pop() + require.True(t, ok) + require.Equal(t, spe.GetTaskSpec().GetTaskInfo().GetName(), "for-loop-2") + + spe, ok = scopePath.Pop() + require.True(t, ok) + require.Equal(t, "secondary-pipeline", spe.GetTaskSpec().GetTaskInfo().GetName()) + + require.Equal(t, []string{"root"}, scopePath.StringPath()) + + // Back to the head + spe, ok = scopePath.Pop() + require.True(t, ok) + require.NotNil(t, head) + require.NotNil(t, head.GetComponentSpec()) + require.Nil(t, head.GetTaskSpec()) + + spe, ok = scopePath.Pop() + require.False(t, ok) + require.Empty(t, spe) + + require.Equal(t, 0, scopePath.GetSize()) + + require.Empty(t, scopePath.StringPath()) +} + +func TestBuildFromStringPath(t *testing.T) { + // Load pipeline spec + pipelineSpec, _, err := LoadPipelineAndPlatformSpec("../../v2/driver/test_data/loop_collected_raw_Iterator.py.yaml") + require.NoError(t, err) + + // Convert PipelineSpec to Struct + b, err := protojson.Marshal(pipelineSpec) + require.NoError(t, err) + var st structpb.Struct + err = protojson.Unmarshal(b, &st) + require.NoError(t, err) + + // Test successful path construction + path := []string{"root", "secondary-pipeline"} + scopePath, err := ScopePathFromStringPathWithNewTask(&st, path, "for-loop-2") + require.NoError(t, err) + require.Equal(t, []string{"root", "secondary-pipeline", "for-loop-2"}, scopePath.StringPath()) + + // Test invalid path + invalidPath := []string{"root", "non-existent-task"} + _, err = ScopePathFromStringPathWithNewTask(&st, invalidPath, "") + require.Error(t, err) +} diff --git a/backend/src/common/util/yaml_parser.go b/backend/src/common/util/yaml_parser.go new file mode 100644 index 00000000000..fe7c3353300 --- /dev/null +++ b/backend/src/common/util/yaml_parser.go @@ -0,0 +1,122 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package util + +import ( + "bytes" + "fmt" + "os" + + "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" + "github.com/kubeflow/pipelines/kubernetes_platform/go/kubernetesplatform" + "google.golang.org/protobuf/encoding/protojson" + yaml3 "gopkg.in/yaml.v3" + "sigs.k8s.io/yaml" +) + +func LoadPipelineAndPlatformSpec(path string) (*pipelinespec.PipelineSpec, *pipelinespec.PlatformSpec, error) { + data, err := os.ReadFile(path) + if err != nil { + return nil, nil, fmt.Errorf("failed to read file: %w", err) + } + + dec := yaml3.NewDecoder(bytes.NewReader(data)) + um := protojson.UnmarshalOptions{} + + var first pipelinespec.PipelineSpec + var second pipelinespec.PlatformSpec + + for i := 0; i < 2; i++ { + var doc any + if err := dec.Decode(&doc); err != nil { + break // io.EOF means fewer than 2 docs + } + + // Convert YAML -> JSON + jsonBytes, err := yamlV3ToJSON(doc) + if err != nil { + return nil, nil, fmt.Errorf("failed to convert YAML to JSON: %w", err) + } + + switch i { + case 0: + if err := um.Unmarshal(jsonBytes, &first); err != nil { + return nil, nil, fmt.Errorf("failed to unmarshal first spec: %w", err) + } + case 1: + if err := um.Unmarshal(jsonBytes, &second); err != nil { + return nil, nil, fmt.Errorf("failed to unmarshal second spec: %w", err) + } + } + } + + return &first, &second, nil +} + +// helper: re-encode the generic YAML doc and convert it to JSON +func yamlV3ToJSON(v any) ([]byte, error) { + y, err := yaml.Marshal(v) + if err != nil { + return nil, err + } + return yaml.YAMLToJSON(y) +} + +func LoadKubernetesExecutorConfig( + componentSpec *pipelinespec.ComponentSpec, + platformSpec *pipelinespec.PlatformSpec, +) (*kubernetesplatform.KubernetesExecutorConfig, error) { + + var kubernetesPlatformSpec kubernetesplatform.KubernetesExecutorConfig + if componentSpec.GetExecutorLabel() == "" { + return nil, fmt.Errorf("executor label not found") + } + + executorLabel := componentSpec.GetExecutorLabel() + if platformSpec.GetPlatforms() != nil { + if singlePlatformSpecRaw, ok := platformSpec.GetPlatforms()["kubernetes"]; ok { + + var singlePlatformSpec pipelinespec.SinglePlatformSpec + jsonBytes, err := protojson.Marshal(singlePlatformSpecRaw) + if err != nil { + return nil, fmt.Errorf("failed to marshal single platform spec: %w", err) + } + if err := protojson.Unmarshal(jsonBytes, &singlePlatformSpec); err != nil { + return nil, fmt.Errorf("failed to unmarshal single platform spec: %w", err) + } + + if singlePlatformSpec.GetDeploymentSpec() == nil { + return nil, fmt.Errorf("deployment spec not found") + } + if singlePlatformSpec.GetDeploymentSpec().GetExecutors() == nil { + return nil, fmt.Errorf("executors not found") + } + + executor := singlePlatformSpec.GetDeploymentSpec().GetExecutors()[executorLabel] + if executor != nil { + jsonBytes, err := protojson.Marshal(executor) + if err != nil { + return nil, fmt.Errorf("failed to marshal executor config: %w", err) + } + if err := protojson.Unmarshal(jsonBytes, &kubernetesPlatformSpec); err != nil { + return nil, fmt.Errorf("failed to unmarshal executor config: %w", err) + } + } + } else { + return nil, fmt.Errorf("kubernetes platform config not found") + } + } + return &kubernetesPlatformSpec, nil +} diff --git a/backend/src/v2/apiclient/auth.go b/backend/src/v2/apiclient/auth.go new file mode 100644 index 00000000000..bab7a761ac0 --- /dev/null +++ b/backend/src/v2/apiclient/auth.go @@ -0,0 +1,112 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); 
+// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package apiclient provides API client functionality for KFP v2. +package apiclient + +import ( + "context" + "os" + "sync" + "time" + + "github.com/golang/glog" + "google.golang.org/grpc" + "google.golang.org/grpc/metadata" +) + +const ( + // KFPTokenPath is the path where the projected service account token is mounted + KFPTokenPath = "/var/run/secrets/kfp/token" + // TokenCacheTTL is how long we cache the token before re-reading from disk + TokenCacheTTL = 5 * time.Minute +) + +// tokenCache holds a cached token and its expiry time +type tokenCache struct { + mu sync.RWMutex + token string + expiresAt time.Time +} + +var cache = &tokenCache{} + +// getToken reads the KFP service account token from the projected volume. +// It caches the token for TokenCacheTTL to avoid reading from disk on every request. +// Returns empty string if the token file doesn't exist (e.g., dev environments). +func getToken() string { + cache.mu.RLock() + // Check if we have a valid cached token + if cache.token != "" && time.Now().Before(cache.expiresAt) { + token := cache.token + cache.mu.RUnlock() + return token + } + cache.mu.RUnlock() + + // Need to refresh the token + cache.mu.Lock() + defer cache.mu.Unlock() + + // Double-check after acquiring write lock + if cache.token != "" && time.Now().Before(cache.expiresAt) { + return cache.token + } + + // Read token from file + tokenBytes, err := os.ReadFile(KFPTokenPath) + if err != nil { + if os.IsNotExist(err) { + // Token file doesn't exist - likely dev/test environment + // Log once per cache refresh cycle to avoid spam + if cache.token == "" { + glog.V(2).Infof("KFP token file not found at %s, proceeding without authentication", KFPTokenPath) + } + cache.token = "" + cache.expiresAt = time.Now().Add(TokenCacheTTL) + return "" + } + // Other errors (permissions, I/O) - log warning but don't fail + glog.Warningf("Failed to read KFP token from %s: %v, proceeding without authentication", KFPTokenPath, err) + cache.token = "" + cache.expiresAt = time.Now().Add(1 * time.Minute) // Retry sooner on errors + return "" + } + + token := string(tokenBytes) + cache.token = token + cache.expiresAt = time.Now().Add(TokenCacheTTL) + + glog.V(3).Infof("Successfully loaded KFP token from %s", KFPTokenPath) + return token +} + +// authUnaryInterceptor is a gRPC unary interceptor that adds the Authorization header +// to all outgoing requests using the KFP service account token. +func authUnaryInterceptor( + ctx context.Context, + method string, + req interface{}, + reply interface{}, + cc *grpc.ClientConn, + invoker grpc.UnaryInvoker, + opts ...grpc.CallOption, +) error { + token := getToken() + if token != "" { + // Add Authorization header with Bearer token + ctx = metadata.AppendToOutgoingContext(ctx, "authorization", "Bearer "+token) + } + return invoker(ctx, method, req, reply, cc, opts...) 
+} diff --git a/backend/src/v2/apiclient/client.go b/backend/src/v2/apiclient/client.go new file mode 100644 index 00000000000..a5b3e2b537f --- /dev/null +++ b/backend/src/v2/apiclient/client.go @@ -0,0 +1,87 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package apiclient + +import ( + "crypto/tls" + "fmt" + "os" + + gc "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" + "google.golang.org/grpc" + "google.golang.org/grpc/credentials" + "google.golang.org/grpc/credentials/insecure" +) + +// Client provides typed clients for KFP v2beta1 API services used by driver/launcher. +type Client struct { + Run gc.RunServiceClient + Pipeline gc.PipelineServiceClient + Artifact gc.ArtifactServiceClient + Conn *grpc.ClientConn + Endpoint string +} + +// Config holds connection options. +type Config struct { + // Endpoint in host:port form, e.g. ml-pipeline.kubeflow:8887 + Endpoint string +} + +// FromEnv builds a Config from environment with sensible defaults. +// KFP_API_ADDRESS and KFP_API_PORT are used; default is ml-pipeline.kubeflow:8887. +func FromEnv() *Config { + addr := os.Getenv("KFP_API_ADDRESS") + port := os.Getenv("KFP_API_PORT") + endpoint := "ml-pipeline.kubeflow:8887" + if addr != "" && port != "" { + endpoint = fmt.Sprintf("%s:%s", addr, port) + } + return &Config{Endpoint: endpoint} +} + +// New creates a new API client connection. +func New(cfg *Config, tlsCfg *tls.Config) (*Client, error) { + if cfg == nil || cfg.Endpoint == "" { + return nil, fmt.Errorf("invalid config: missing endpoint") + } + creds := insecure.NewCredentials() + if tlsCfg != nil { + creds = credentials.NewTLS(tlsCfg) + } + conn, err := grpc.NewClient( + cfg.Endpoint, + grpc.WithTransportCredentials(creds), + grpc.WithUnaryInterceptor(authUnaryInterceptor), // Add auth interceptor for all requests + ) + if err != nil { + return nil, fmt.Errorf("failed to connect to KFP API at %s: %w", cfg.Endpoint, err) + } + return &Client{ + Run: gc.NewRunServiceClient(conn), + Pipeline: gc.NewPipelineServiceClient(conn), + Artifact: gc.NewArtifactServiceClient(conn), + Conn: conn, + Endpoint: cfg.Endpoint, + }, nil +} + +// Close closes the underlying gRPC connection. +func (c *Client) Close() error { + if c == nil || c.Conn == nil { + return nil + } + return c.Conn.Close() +} diff --git a/backend/src/v2/apiclient/kfpapi/api.go b/backend/src/v2/apiclient/kfpapi/api.go new file mode 100644 index 00000000000..686ea163434 --- /dev/null +++ b/backend/src/v2/apiclient/kfpapi/api.go @@ -0,0 +1,355 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package kfpapi + +import ( + "context" + "fmt" + "time" + + gc "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" + "github.com/kubeflow/pipelines/backend/src/common/util" + "github.com/kubeflow/pipelines/backend/src/v2/apiclient" + "google.golang.org/protobuf/encoding/protojson" + "google.golang.org/protobuf/types/known/structpb" + "google.golang.org/protobuf/types/known/timestamppb" +) + +// API is a minimal interface exposing KFP API operations needed by drivers and launchers. +// It abstracts over RunService, ArtifactService, and PipelineService. +// +// This indirection lets us unit test components and also evolve the underlying +// client without touching driver/launcher logic. +// +// Note: We intentionally do not expose the full apiclient.Client here. +// Only the small surface area needed is included. + +type API interface { + // Run operations + GetRun(ctx context.Context, req *gc.GetRunRequest) (*gc.Run, error) + + // Task operations + CreateTask(ctx context.Context, req *gc.CreateTaskRequest) (*gc.PipelineTaskDetail, error) + UpdateTask(ctx context.Context, req *gc.UpdateTaskRequest) (*gc.PipelineTaskDetail, error) + UpdateTasksBulk(ctx context.Context, req *gc.UpdateTasksBulkRequest) (*gc.UpdateTasksBulkResponse, error) + GetTask(ctx context.Context, req *gc.GetTaskRequest) (*gc.PipelineTaskDetail, error) + ListTasks(ctx context.Context, req *gc.ListTasksRequest) (*gc.ListTasksResponse, error) + + // Artifact operations + CreateArtifact(ctx context.Context, req *gc.CreateArtifactRequest) (*gc.Artifact, error) + CreateArtifactsBulk(ctx context.Context, req *gc.CreateArtifactsBulkRequest) (*gc.CreateArtifactsBulkResponse, error) + ListArtifactsByURI(ctx context.Context, uri, namespace string) ([]*gc.Artifact, error) + ListArtifactTasks(ctx context.Context, req *gc.ListArtifactTasksRequest) (*gc.ListArtifactTasksResponse, error) + CreateArtifactTask(ctx context.Context, req *gc.CreateArtifactTaskRequest) (*gc.ArtifactTask, error) + CreateArtifactTasks(ctx context.Context, req *gc.CreateArtifactTasksBulkRequest) (*gc.CreateArtifactTasksBulkResponse, error) + + // Pipeline version operations + GetPipelineVersion(ctx context.Context, req *gc.GetPipelineVersionRequest) (*gc.PipelineVersion, error) + FetchPipelineSpecFromRun(ctx context.Context, run *gc.Run) (*structpb.Struct, error) + + // Propagate status updates up the DAG + UpdateStatuses(ctx context.Context, run *gc.Run, pipelineSpec *structpb.Struct, currentTask *gc.PipelineTaskDetail) error +} + +// clientAdapter adapts apiclient.Client to API. +// It is a thin wrapper delegating to the generated gRPC clients. + +type clientAdapter struct { + c *apiclient.Client +} + +// New wraps the apiclient.Client into an API interface. +func New(c *apiclient.Client) API { + return &clientAdapter{c: c} +} + +// Implement API by forwarding calls to typed clients. 
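+
+// Editor's note: a minimal end-to-end sketch (illustrative, not part of this
+// patch) of how the pieces are meant to compose; error handling is elided and
+// "ctx"/"runID" are assumed to exist. The auth interceptor registered in
+// apiclient.New attaches the bearer token automatically, so callers never set
+// the authorization header themselves:
+//
+//	cfg := apiclient.FromEnv()           // KFP_API_ADDRESS/KFP_API_PORT or ml-pipeline.kubeflow:8887
+//	client, _ := apiclient.New(cfg, nil) // nil *tls.Config => insecure transport credentials
+//	defer client.Close()
+//	api := kfpapi.New(client)            // narrow API surface used by driver/launcher
+//	run, _ := api.GetRun(ctx, &gc.GetRunRequest{RunId: runID})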
+ +func (k *clientAdapter) GetRun(ctx context.Context, req *gc.GetRunRequest) (*gc.Run, error) { + return k.c.Run.GetRun(ctx, req) +} + +func (k *clientAdapter) CreateTask(ctx context.Context, req *gc.CreateTaskRequest) (*gc.PipelineTaskDetail, error) { + return k.c.Run.CreateTask(ctx, req) +} + +func (k *clientAdapter) UpdateTask(ctx context.Context, req *gc.UpdateTaskRequest) (*gc.PipelineTaskDetail, error) { + return k.c.Run.UpdateTask(ctx, req) +} + +func (k *clientAdapter) UpdateTasksBulk(ctx context.Context, req *gc.UpdateTasksBulkRequest) (*gc.UpdateTasksBulkResponse, error) { + return k.c.Run.UpdateTasksBulk(ctx, req) +} + +func (k *clientAdapter) GetTask(ctx context.Context, req *gc.GetTaskRequest) (*gc.PipelineTaskDetail, error) { + return k.c.Run.GetTask(ctx, req) +} + +func (k *clientAdapter) ListTasks(ctx context.Context, req *gc.ListTasksRequest) (*gc.ListTasksResponse, error) { + return k.c.Run.ListTasks(ctx, req) +} + +func (k *clientAdapter) CreateArtifact(ctx context.Context, req *gc.CreateArtifactRequest) (*gc.Artifact, error) { + return k.c.Artifact.CreateArtifact(ctx, req) +} + +func (k *clientAdapter) CreateArtifactsBulk(ctx context.Context, req *gc.CreateArtifactsBulkRequest) (*gc.CreateArtifactsBulkResponse, error) { + return k.c.Artifact.CreateArtifactsBulk(ctx, req) +} + +func (k *clientAdapter) ListArtifactsByURI(ctx context.Context, uri, namespace string) ([]*gc.Artifact, error) { + predicates := []*gc.Predicate{ + {Key: "uri", Operation: gc.Predicate_EQUALS, Value: &gc.Predicate_StringValue{StringValue: uri}}, + } + filter := &gc.Filter{ + Predicates: predicates, + } + mo := protojson.MarshalOptions{ + UseProtoNames: true, + EmitUnpopulated: false, + } + filterJSON, err := mo.Marshal(filter) + if err != nil { + return nil, fmt.Errorf("failed to marshal filter: %v", err) + } + + const pageSize = 100 + var allArtifacts []*gc.Artifact + nextPageToken := "" + + for { + artifactsResponse, err := k.c.Artifact.ListArtifacts(ctx, &gc.ListArtifactRequest{ + Namespace: namespace, + Filter: string(filterJSON), + PageSize: pageSize, + PageToken: nextPageToken, + }) + if err != nil { + return nil, err + } + + allArtifacts = append(allArtifacts, artifactsResponse.GetArtifacts()...) 
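+ // Keep paging until the server returns an empty next page token.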
+ nextPageToken = artifactsResponse.GetNextPageToken()
+
+ if nextPageToken == "" {
+ break
+ }
+ }
+
+ return allArtifacts, nil
+}
+
+func (k *clientAdapter) ListArtifactTasks(ctx context.Context, req *gc.ListArtifactTasksRequest) (*gc.ListArtifactTasksResponse, error) {
+ return k.c.Artifact.ListArtifactTasks(ctx, req)
+}
+
+func (k *clientAdapter) CreateArtifactTask(ctx context.Context, req *gc.CreateArtifactTaskRequest) (*gc.ArtifactTask, error) {
+ return k.c.Artifact.CreateArtifactTask(ctx, req)
+}
+
+func (k *clientAdapter) CreateArtifactTasks(ctx context.Context, req *gc.CreateArtifactTasksBulkRequest) (*gc.CreateArtifactTasksBulkResponse, error) {
+ return k.c.Artifact.CreateArtifactTasksBulk(ctx, req)
+}
+
+func (k *clientAdapter) GetPipelineVersion(ctx context.Context, req *gc.GetPipelineVersionRequest) (*gc.PipelineVersion, error) {
+ return k.c.Pipeline.GetPipelineVersion(ctx, req)
+}
+
+func (k *clientAdapter) FetchPipelineSpecFromRun(ctx context.Context, run *gc.Run) (*structpb.Struct, error) {
+ var pipelineSpecStruct *structpb.Struct
+ if run.GetPipelineSpec() != nil {
+ pipelineSpecStruct = run.GetPipelineSpec()
+ } else if run.GetPipelineVersionReference() != nil {
+ pvr := run.GetPipelineVersionReference()
+ pipeline, err := k.GetPipelineVersion(ctx, &gc.GetPipelineVersionRequest{
+ PipelineId: pvr.GetPipelineId(),
+ PipelineVersionId: pvr.GetPipelineVersionId(),
+ })
+ if err != nil {
+ return nil, err
+ }
+ pipelineSpecStruct = pipeline.GetPipelineSpec()
+ } else {
+ return nil, fmt.Errorf("pipeline spec is not set")
+ }
+ // When platform_spec is included, the structure of the PipelineSpec is different.
+ if spec, ok := pipelineSpecStruct.GetFields()["pipeline_spec"]; ok {
+ return spec.GetStructValue(), nil
+ }
+ return pipelineSpecStruct, nil
+}
+
+// UpdateStatuses traverses up the DAG until it finds a parent task that still has children in "RUNNING" state,
+// or until it reaches the root. If a parent task has other children still running, that parent is also still running.
+// However, if the currentTask is a parent task and all of its child tasks have been created (though not necessarily completed):
+// - if all children in this DAG are CACHED, then the currentTask is updated to "CACHED"
+// - if any children in this DAG are FAILED, then the currentTask is updated to "FAILED"
+// - if all children in this DAG are SKIPPED, then the currentTask is updated to "SKIPPED"
+// - in any other case the state is SUCCEEDED
+//
+// TODO(HumairAK): Let's have API Server handle this call instead of doing it here.
+func (k *clientAdapter) UpdateStatuses(ctx context.Context, run *gc.Run, pipelineSpec *structpb.Struct, currentTask *gc.PipelineTaskDetail) error {
+ return updateStatuses(ctx, run, k, pipelineSpec, currentTask)
+}
+
+// updateStatuses traverses up the DAG until it finds a parent task that still has children in "RUNNING" state,
+// or until it reaches the root.
+// This function is separated from UpdateStatuses so that it can be used by the mock API client in tests.
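+//
+// Worked example (editor's illustration): a LOOP parent with iteration_count = 3
+// over a DAG of 2 tasks is only evaluated once all 3 * 2 = 6 children exist; one
+// FAILED child makes the parent FAILED, all-CACHED children make it CACHED,
+// all-SKIPPED children make it SKIPPED, and otherwise it is SUCCEEDED.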
+func updateStatuses(ctx context.Context, run *gc.Run, kfpAPIClient API, pipelineSpec *structpb.Struct, currentTask *gc.PipelineTaskDetail) error {
+ // Create a map of task IDs to tasks for quick lookup
+ taskMap := make(map[string]*gc.PipelineTaskDetail)
+ for _, task := range run.GetTasks() {
+ taskMap[task.GetTaskId()] = task
+ }
+
+ // Start with the current task and traverse up
+ for {
+ // If the current task has no parent, we've reached the root
+ if currentTask.ParentTaskId == nil || *currentTask.ParentTaskId == "" {
+ // Evaluate the root task's status based on its children
+ if err := evaluateAndUpdateParentStatus(ctx, run, currentTask, kfpAPIClient); err != nil {
+ return fmt.Errorf("failed to evaluate root task %s status: %w", currentTask.GetTaskId(), err)
+ }
+ break
+ }
+
+ // Get the parent task
+ parentTask, exists := taskMap[*currentTask.ParentTaskId]
+ if !exists {
+ return fmt.Errorf("parent task %s not found for task %s", *currentTask.ParentTaskId, currentTask.GetTaskId())
+ }
+
+ // Determine the total number of child tasks by inspecting the parent DAG's
+ // task count within its component spec.
+ // We need to use the parent task's scope path, not the current task's scope path.
+ // Note this doesn't factor in the number of iterations of these child tasks when in a loop.
+ getScopePath, err := util.ScopePathFromStringPath(pipelineSpec, parentTask.GetScopePath())
+ if err != nil {
+ return fmt.Errorf("failed to get scope path for parent task %s: %w", parentTask.GetTaskId(), err)
+ }
+ if getScopePath.GetLast() == nil || getScopePath.GetLast().GetComponentSpec() == nil || getScopePath.GetLast().GetComponentSpec().GetDag() == nil {
+ return fmt.Errorf("failed to get dag for parent task %s (scope: %s): component spec or dag is nil", parentTask.GetTaskId(), parentTask.GetScopePath())
+ }
+ numberOfTasksInThisDag := len(getScopePath.GetLast().GetComponentSpec().GetDag().GetTasks())
+
+ // Before we proceed to update this parent task's status, we need to ensure that all child tasks have been
+ // created (irrespective of their status).
+ var expectedTotalChildTasks int
+ if parentTask.GetType() == gc.PipelineTaskDetail_LOOP {
+ typeAttrs := parentTask.GetTypeAttributes()
+ if typeAttrs == nil || typeAttrs.IterationCount == nil {
+ return fmt.Errorf("loop task %s is missing iteration_count attribute", parentTask.GetTaskId())
+ }
+ expectedTotalChildTasks = int(*typeAttrs.IterationCount) * numberOfTasksInThisDag
+ } else {
+ expectedTotalChildTasks = numberOfTasksInThisDag
+ }
+ // Now count the actual number of child tasks created.
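+ // Editor's note: the loop below also returns early when any child is still
+ // RUNNING, since the parent's terminal state cannot be decided yet.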
+ var childCount int + for _, task := range run.GetTasks() { + if task.ParentTaskId != nil && *task.ParentTaskId == parentTask.GetTaskId() { + if task.GetState() == gc.PipelineTaskDetail_RUNNING { + return nil + } + childCount++ + } + } + + // If not all children created yet, exit traversal + if childCount < expectedTotalChildTasks { + return nil + } + + // Evaluate and update parent's status based on its children + if err := evaluateAndUpdateParentStatus(ctx, run, parentTask, kfpAPIClient); err != nil { + return fmt.Errorf("failed to evaluate parent task %s status: %w", parentTask.GetTaskId(), err) + } + + // Move to the parent for next iteration + currentTask = parentTask + } + return nil +} + +// evaluateAndUpdateParentStatus evaluates a parent task's status based on its direct children and updates it accordingly +func evaluateAndUpdateParentStatus( + ctx context.Context, + run *gc.Run, + parentTask *gc.PipelineTaskDetail, + kfpAPIClient API, +) error { + // Collect all direct children of this parent + var children []*gc.PipelineTaskDetail + for _, task := range run.GetTasks() { + if task.ParentTaskId != nil && *task.ParentTaskId == parentTask.GetTaskId() { + children = append(children, task) + } + } + + // If no children, nothing to evaluate + if len(children) == 0 { + return nil + } + + // Evaluate child statuses + allCached := true + allSkipped := true + anyFailed := false + + for _, child := range children { + status := child.GetState() + + // Check for FAILED + if status == gc.PipelineTaskDetail_FAILED { + anyFailed = true + } + + // Check if all are CACHED + if status != gc.PipelineTaskDetail_CACHED { + allCached = false + } + + // Check if all are SKIPPED + if status != gc.PipelineTaskDetail_SKIPPED { + allSkipped = false + } + } + + // Determine the new status for the parent + var newStatus gc.PipelineTaskDetail_TaskState + if anyFailed { + newStatus = gc.PipelineTaskDetail_FAILED + } else if allCached { + newStatus = gc.PipelineTaskDetail_CACHED + } else if allSkipped { + newStatus = gc.PipelineTaskDetail_SKIPPED + } else { + newStatus = gc.PipelineTaskDetail_SUCCEEDED + } + + // Update the parent task status + parentTask.State = newStatus + parentTask.EndTime = timestamppb.New(time.Now()) + _, err := kfpAPIClient.UpdateTask(ctx, &gc.UpdateTaskRequest{ + TaskId: parentTask.GetTaskId(), + Task: parentTask, + }) + if err != nil { + return fmt.Errorf("failed to update parent task %s status to %s: %w", parentTask.GetTaskId(), newStatus, err) + } + + return nil +} diff --git a/backend/src/v2/apiclient/kfpapi/mock.go b/backend/src/v2/apiclient/kfpapi/mock.go new file mode 100644 index 00000000000..f1e41f42071 --- /dev/null +++ b/backend/src/v2/apiclient/kfpapi/mock.go @@ -0,0 +1,443 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package kfpapi provides KFP API client implementation. 
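+//
+// MockAPI below keeps runs, tasks, and artifacts in in-memory maps so that
+// driver/launcher logic can be exercised without a live API server. A typical
+// test setup (editor's sketch; the IDs are illustrative):
+//
+//	m := NewMockAPI()
+//	m.AddRun(&apiv2beta1.Run{RunId: "run-1"})
+//	run, err := m.GetRun(context.Background(), &apiv2beta1.GetRunRequest{RunId: "run-1"})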
+package kfpapi + +import ( + "context" + "fmt" + "strings" + + "github.com/google/uuid" + apiv2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" + "google.golang.org/protobuf/encoding/protojson" + "google.golang.org/protobuf/encoding/prototext" + "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/types/known/structpb" +) + +// MockAPI provides a mock implementation of API for testing +type MockAPI struct { + runs map[string]*apiv2beta1.Run + tasks map[string]*apiv2beta1.PipelineTaskDetail + artifacts map[string]*apiv2beta1.Artifact + artifactTasks map[string]*apiv2beta1.ArtifactTask + pipelineVersions map[string]*apiv2beta1.PipelineVersion +} + +// NewMockAPI creates a new mock API +func NewMockAPI() *MockAPI { + return &MockAPI{ + runs: make(map[string]*apiv2beta1.Run), + tasks: make(map[string]*apiv2beta1.PipelineTaskDetail), + artifacts: make(map[string]*apiv2beta1.Artifact), + artifactTasks: make(map[string]*apiv2beta1.ArtifactTask), + pipelineVersions: make(map[string]*apiv2beta1.PipelineVersion), + } +} + +func (m *MockAPI) GetRun(_ context.Context, req *apiv2beta1.GetRunRequest) (*apiv2beta1.Run, error) { + if run, exists := m.runs[req.RunId]; exists { + // Create a copy of the run to populate with tasks + populatedRun := &apiv2beta1.Run{ + RunId: run.RunId, + DisplayName: run.DisplayName, + PipelineSource: &apiv2beta1.Run_PipelineSpec{PipelineSpec: run.GetPipelineSpec()}, + RuntimeConfig: run.RuntimeConfig, + State: run.State, + Tasks: []*apiv2beta1.PipelineTaskDetail{}, + } + + // Find all tasks for this run + for _, task := range m.tasks { + if task.RunId == req.RunId { + // Create a copy of the task to populate with artifacts + populatedTask := m.hydrateTask(task) + populatedRun.Tasks = append(populatedRun.Tasks, populatedTask) + } + } + return populatedRun, nil + } + return nil, fmt.Errorf("run not found: %s", req.RunId) +} + +func (m *MockAPI) hydrateTask(task *apiv2beta1.PipelineTaskDetail) *apiv2beta1.PipelineTaskDetail { + // Create a copy of the task to populate with artifacts + populatedTask := proto.Clone(task).(*apiv2beta1.PipelineTaskDetail) + populatedTask.Inputs = &apiv2beta1.PipelineTaskDetail_InputOutputs{} + populatedTask.Outputs = &apiv2beta1.PipelineTaskDetail_InputOutputs{} + + // Copy existing parameters if they exist + if task.Inputs != nil { + populatedTask.Inputs.Parameters = task.Inputs.Parameters + } + if task.Outputs != nil { + populatedTask.Outputs.Parameters = task.Outputs.Parameters + } + + // Find artifacts associated with this task + var inputArtifacts []*apiv2beta1.PipelineTaskDetail_InputOutputs_IOArtifact + var outputArtifacts []*apiv2beta1.PipelineTaskDetail_InputOutputs_IOArtifact + + for _, artifactTask := range m.artifactTasks { + if artifactTask.TaskId == task.TaskId { + // Get the associated artifact + if artifact, exists := m.artifacts[artifactTask.ArtifactId]; exists { + ioArtifact := &apiv2beta1.PipelineTaskDetail_InputOutputs_IOArtifact{ + Artifacts: []*apiv2beta1.Artifact{artifact}, + Type: artifactTask.Type, + } + + ioArtifact.ArtifactKey = artifactTask.Key + ioArtifact.Producer = artifactTask.Producer + + // Determine if this is an input or output artifact based on ArtifactTaskType + switch artifactTask.Type { + case apiv2beta1.IOType_COMPONENT_INPUT, + apiv2beta1.IOType_ITERATOR_INPUT, + apiv2beta1.IOType_RUNTIME_VALUE_INPUT, + apiv2beta1.IOType_COMPONENT_DEFAULT_INPUT, + apiv2beta1.IOType_TASK_OUTPUT_INPUT, + apiv2beta1.IOType_COLLECTED_INPUTS, + apiv2beta1.IOType_ITERATOR_INPUT_RAW: + 
inputArtifacts = append(inputArtifacts, ioArtifact) + case apiv2beta1.IOType_OUTPUT, + apiv2beta1.IOType_ITERATOR_OUTPUT, + apiv2beta1.IOType_ONE_OF_OUTPUT, + apiv2beta1.IOType_TASK_FINAL_STATUS_OUTPUT: + outputArtifacts = append(outputArtifacts, ioArtifact) + } + } + } + } + + // Set the artifacts on the task + populatedTask.Inputs.Artifacts = inputArtifacts + populatedTask.Outputs.Artifacts = outputArtifacts + + return populatedTask +} + +func (m *MockAPI) CreateTask(_ context.Context, req *apiv2beta1.CreateTaskRequest) (*apiv2beta1.PipelineTaskDetail, error) { + task := req.Task + if task.TaskId == "" { + uuid, _ := uuid.NewRandom() + task.TaskId = uuid.String() + } + m.tasks[task.TaskId] = task + return task, nil +} + +func (m *MockAPI) UpdateTask(_ context.Context, req *apiv2beta1.UpdateTaskRequest) (*apiv2beta1.PipelineTaskDetail, error) { + if _, exists := m.tasks[req.TaskId]; !exists { + return nil, fmt.Errorf("task not found: %s", req.TaskId) + } + task := req.Task + task.TaskId = req.TaskId + m.tasks[req.TaskId] = task + task = m.hydrateTask(task) + return task, nil +} + +func (m *MockAPI) UpdateTasksBulk(_ context.Context, req *apiv2beta1.UpdateTasksBulkRequest) (*apiv2beta1.UpdateTasksBulkResponse, error) { + response := &apiv2beta1.UpdateTasksBulkResponse{ + Tasks: make(map[string]*apiv2beta1.PipelineTaskDetail), + } + + for taskID, task := range req.Tasks { + if _, exists := m.tasks[taskID]; !exists { + return nil, fmt.Errorf("task not found: %s", taskID) + } + task.TaskId = taskID + m.tasks[taskID] = task + hydratedTask := m.hydrateTask(task) + response.Tasks[taskID] = hydratedTask + } + + return response, nil +} + +func (m *MockAPI) GetTask(_ context.Context, req *apiv2beta1.GetTaskRequest) (*apiv2beta1.PipelineTaskDetail, error) { + if _, exists := m.tasks[req.TaskId]; exists { + task := m.hydrateTask(m.tasks[req.TaskId]) + return task, nil + } + + return nil, fmt.Errorf("task not found: %s", req.TaskId) +} + +func (m *MockAPI) ListTasks(_ context.Context, req *apiv2beta1.ListTasksRequest) (*apiv2beta1.ListTasksResponse, error) { + var tasks []*apiv2beta1.PipelineTaskDetail + + var predicates []*apiv2beta1.Predicate + if req.GetFilter() != "" { + raw := strings.TrimSpace(req.GetFilter()) + filter := &apiv2beta1.Filter{} + + // First, try parsing as proto text format (matches filter.String()). + if err := prototext.Unmarshal([]byte(raw), filter); err != nil { + // Fallback to JSON. Support raw array of predicates by wrapping. 
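+ // For example (editor's illustration), a raw predicate array such as
+ //   [{"key":"cache_fingerprint","operation":"EQUALS","string_value":"abc"}]
+ // is wrapped into {"predicates":[...]} before protojson parsing.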
+ if len(raw) > 0 && raw[0] == '[' { + raw = `{"predicates":` + raw + `}` + } + if jerr := protojson.Unmarshal([]byte(raw), filter); jerr != nil { + return nil, fmt.Errorf("failed to parse filter; textproto error: %v; json error: %v", err, jerr) + } + } + predicates = filter.GetPredicates() + } + + // Filter by run ID if specified + if runID := req.GetRunId(); runID != "" { + for _, task := range m.tasks { + if task.RunId == runID { + tasks = append(tasks, task) + } + } + } else if parentID := req.GetParentId(); parentID != "" { + // Filter by parent task ID + for _, task := range m.tasks { + if task.ParentTaskId != nil && *task.ParentTaskId == parentID { + tasks = append(tasks, task) + } + } + } else { + // Return all tasks + for _, task := range m.tasks { + tasks = append(tasks, task) + } + } + + // Just handle cache case for now + if len(predicates) == 2 { + var statusPredicate *apiv2beta1.Predicate + var fingerprintPredicate *apiv2beta1.Predicate + + switch { + case predicates[0].Key == "status" && predicates[1].Key == "cache_fingerprint": + statusPredicate = predicates[0] + fingerprintPredicate = predicates[1] + case predicates[1].Key == "status" && predicates[0].Key == "cache_fingerprint": + statusPredicate = predicates[1] + fingerprintPredicate = predicates[0] + default: + return nil, fmt.Errorf("only cache filter supported in mock library: %s", req.GetFilter()) + } + + var filtered []*apiv2beta1.PipelineTaskDetail + status := statusPredicate.GetIntValue() + fingerprint := fingerprintPredicate.GetStringValue() + for _, t := range tasks { + if int32(t.GetState().Number()) == status && t.GetCacheFingerprint() == fingerprint { + filtered = append(filtered, t) + } + + } + tasks = filtered + } + + var hydratedTasks []*apiv2beta1.PipelineTaskDetail + for _, task := range tasks { + hydratedTasks = append(hydratedTasks, m.hydrateTask(task)) + } + + return &apiv2beta1.ListTasksResponse{ + Tasks: hydratedTasks, + TotalSize: int32(len(tasks)), + }, nil +} + +func (m *MockAPI) CreateArtifact(_ context.Context, req *apiv2beta1.CreateArtifactRequest) (*apiv2beta1.Artifact, error) { + artifact := req.Artifact + if artifact.ArtifactId == "" { + uuid, _ := uuid.NewRandom() + artifact.ArtifactId = uuid.String() + } + m.artifacts[artifact.ArtifactId] = artifact + + task := m.tasks[req.TaskId] + // Also create the artifact-task relationship + // This mimics what the real API server does + + // Get the task name if the task exists, otherwise use empty string + taskName := "" + if task != nil { + taskName = task.Name + } + + artifactTask := &apiv2beta1.ArtifactTask{ + ArtifactId: artifact.ArtifactId, + TaskId: req.TaskId, + RunId: req.RunId, + Key: req.ProducerKey, + Type: req.Type, + Producer: &apiv2beta1.IOProducer{ + TaskName: taskName, + }, + } + if req.IterationIndex != nil { + artifactTask.Producer.Iteration = req.IterationIndex + } + + // Generate ID for artifact task + atUUID, _ := uuid.NewRandom() + artifactTask.Id = atUUID.String() + m.artifactTasks[artifactTask.Id] = artifactTask + + return artifact, nil +} + +func (m *MockAPI) CreateArtifactsBulk(_ context.Context, req *apiv2beta1.CreateArtifactsBulkRequest) (*apiv2beta1.CreateArtifactsBulkResponse, error) { + response := &apiv2beta1.CreateArtifactsBulkResponse{ + Artifacts: make([]*apiv2beta1.Artifact, 0, len(req.Artifacts)), + } + + for _, artifactReq := range req.Artifacts { + artifact := artifactReq.Artifact + if artifact.ArtifactId == "" { + uuid, _ := uuid.NewRandom() + artifact.ArtifactId = uuid.String() + } + 
m.artifacts[artifact.ArtifactId] = artifact + + // Get the task name if the task exists, otherwise use empty string + task := m.tasks[artifactReq.TaskId] + taskName := "" + if task != nil { + taskName = task.Name + } + + // Also create the artifact-task relationship + artifactTask := &apiv2beta1.ArtifactTask{ + ArtifactId: artifact.ArtifactId, + TaskId: artifactReq.TaskId, + RunId: artifactReq.RunId, + Key: artifactReq.ProducerKey, + Type: artifactReq.Type, + Producer: &apiv2beta1.IOProducer{ + TaskName: taskName, + }, + } + if artifactReq.IterationIndex != nil { + artifactTask.Producer.Iteration = artifactReq.IterationIndex + } + + // Generate ID for artifact task + atUUID, _ := uuid.NewRandom() + artifactTask.Id = atUUID.String() + m.artifactTasks[artifactTask.Id] = artifactTask + + response.Artifacts = append(response.Artifacts, artifact) + } + + return response, nil +} + +func (m *MockAPI) ListArtifactTasks(_ context.Context, _ *apiv2beta1.ListArtifactTasksRequest) (*apiv2beta1.ListArtifactTasksResponse, error) { + var artifactTasks []*apiv2beta1.ArtifactTask + for _, at := range m.artifactTasks { + artifactTasks = append(artifactTasks, at) + } + return &apiv2beta1.ListArtifactTasksResponse{ + ArtifactTasks: artifactTasks, + TotalSize: int32(len(artifactTasks)), + }, nil +} + +func (m *MockAPI) ListArtifactsByURI(_ context.Context, uri string, namespace string) ([]*apiv2beta1.Artifact, error) { + var artifacts []*apiv2beta1.Artifact + for _, artifact := range m.artifacts { + if artifact.GetUri() == uri && artifact.GetNamespace() == namespace { + artifacts = append(artifacts, artifact) + } + } + return artifacts, nil +} + +func (m *MockAPI) CreateArtifactTask(_ context.Context, req *apiv2beta1.CreateArtifactTaskRequest) (*apiv2beta1.ArtifactTask, error) { + artifactTask := req.ArtifactTask + if artifactTask.Id == "" { + uuid, _ := uuid.NewRandom() + artifactTask.Id = uuid.String() + } + m.artifactTasks[artifactTask.Id] = artifactTask + return artifactTask, nil +} + +func (m *MockAPI) CreateArtifactTasks(_ context.Context, req *apiv2beta1.CreateArtifactTasksBulkRequest) (*apiv2beta1.CreateArtifactTasksBulkResponse, error) { + var createdTasks []*apiv2beta1.ArtifactTask + for _, at := range req.ArtifactTasks { + if at.Id == "" { + uuid, _ := uuid.NewRandom() + at.Id = uuid.String() + } + m.artifactTasks[at.Id] = at + createdTasks = append(createdTasks, at) + } + return &apiv2beta1.CreateArtifactTasksBulkResponse{ + ArtifactTasks: createdTasks, + }, nil +} + +func (m *MockAPI) GetPipelineVersion(_ context.Context, req *apiv2beta1.GetPipelineVersionRequest) (*apiv2beta1.PipelineVersion, error) { + key := req.PipelineId + ":" + req.PipelineVersionId + if pv, exists := m.pipelineVersions[key]; exists { + return pv, nil + } + return nil, fmt.Errorf("pipeline version not found: %s", key) +} + +func (m *MockAPI) FetchPipelineSpecFromRun(_ context.Context, run *apiv2beta1.Run) (*structpb.Struct, error) { + var pipelineSpecStruct *structpb.Struct + switch { + case run.GetPipelineSpec() != nil: + pipelineSpecStruct = run.GetPipelineSpec() + case run.GetPipelineVersionReference() != nil: + pvr := run.GetPipelineVersionReference() + pipeline, err := m.GetPipelineVersion(context.Background(), &apiv2beta1.GetPipelineVersionRequest{ + PipelineId: pvr.GetPipelineId(), + PipelineVersionId: pvr.GetPipelineVersionId(), + }) + if err != nil { + return nil, err + } + pipelineSpecStruct = pipeline.GetPipelineSpec() + default: + return nil, fmt.Errorf("pipeline spec is not set") + } + if pipelineSpecStruct == 
nil { + return nil, fmt.Errorf("pipeline spec is nil") + } + return pipelineSpecStruct, nil +} + +// AddRun adds a run to the mock for testing +func (m *MockAPI) AddRun(run *apiv2beta1.Run) { + if run.RunId == "" { + uuid, _ := uuid.NewRandom() + run.RunId = uuid.String() + } + m.runs[run.RunId] = run +} + +// AddPipelineVersion adds a pipeline version to the mock for testing +func (m *MockAPI) AddPipelineVersion(pipelineID, versionID string, version *apiv2beta1.PipelineVersion) { + key := pipelineID + ":" + versionID + m.pipelineVersions[key] = version +} + +func (m *MockAPI) UpdateStatuses(ctx context.Context, run *apiv2beta1.Run, pipelineSpec *structpb.Struct, currentTask *apiv2beta1.PipelineTaskDetail) error { + return updateStatuses(ctx, run, m, pipelineSpec, currentTask) +} diff --git a/backend/src/v2/cacheutils/cache.go b/backend/src/v2/cacheutils/cache.go index a242e9c8817..0cd624670fe 100644 --- a/backend/src/v2/cacheutils/cache.go +++ b/backend/src/v2/cacheutils/cache.go @@ -1,154 +1,19 @@ package cacheutils import ( - "context" "crypto/sha256" - "crypto/tls" "encoding/hex" "encoding/json" "fmt" - "google.golang.org/grpc/credentials" - "google.golang.org/grpc/credentials/insecure" - - "google.golang.org/grpc" "google.golang.org/protobuf/encoding/protojson" "google.golang.org/protobuf/types/known/structpb" - "github.com/golang/glog" "github.com/kubeflow/pipelines/api/v2alpha1/go/cachekey" "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" - api "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client" -) - -const ( - // MaxGRPCMessageSize contains max grpc message size supported by the client - MaxClientGRPCMessageSize = 100 * 1024 * 1024 - // The endpoint uses Kubernetes service DNS name with namespace: - // https://kubernetes.io/docs/concepts/services-networking/service/#dns - defaultKfpApiEndpoint = "ml-pipeline.kubeflow:8887" ) -type Client interface { - GetExecutionCache(fingerPrint, pipelineName, namespace string) (string, error) - CreateExecutionCache(ctx context.Context, task *api.Task) error - GenerateCacheKey( - inputs *pipelinespec.ExecutorInput_Inputs, - outputs *pipelinespec.ExecutorInput_Outputs, - outputParametersTypeMap map[string]string, - cmdArgs []string, image string, - pvcNames []string, - ) (*cachekey.CacheKey, error) - GenerateFingerPrint(cacheKey *cachekey.CacheKey) (string, error) -} - -type disabledCacheClient struct{} - -var _ Client = &disabledCacheClient{} - -func (d disabledCacheClient) GenerateCacheKey( - *pipelinespec.ExecutorInput_Inputs, - *pipelinespec.ExecutorInput_Outputs, - map[string]string, - []string, - string, - []string, -) (*cachekey.CacheKey, error) { - panic("GenerateCacheKey is not supposed to be called when cache is disabled") -} - -func (d disabledCacheClient) GenerateFingerPrint(*cachekey.CacheKey) (string, error) { - panic("GenerateFingerPrint is not supposed to be called when cache is disabled") -} - -func (d disabledCacheClient) GetExecutionCache(string, string, string) (string, error) { - panic("GetExecutionCache is not supposed to be called when cache is disabled") -} - -func (d disabledCacheClient) CreateExecutionCache(context.Context, *api.Task) error { - panic("CreateExecutionCache is not supposed to be called when cache is disabled") -} - -// Client is an KFP service client. -type client struct { - svc api.TaskServiceClient -} - -var _ Client = &client{} - -// NewClient creates a Client. 
-func NewClient(cacheDisabled bool, tlsCfg *tls.Config) (Client, error) { - if cacheDisabled { - return &disabledCacheClient{}, nil - } - - creds := insecure.NewCredentials() - if tlsCfg != nil { - creds = credentials.NewTLS(tlsCfg) - } - glog.Infof("Connecting to cache endpoint %s", defaultKfpApiEndpoint) - conn, err := grpc.NewClient( - defaultKfpApiEndpoint, - grpc.WithDefaultCallOptions(grpc.MaxCallRecvMsgSize(MaxClientGRPCMessageSize)), - grpc.WithTransportCredentials(creds), - ) - - if err != nil { - return nil, fmt.Errorf("metadata.NewClient() failed: %w", err) - } - - return &client{ - svc: api.NewTaskServiceClient(conn), - }, nil -} - -func (c *client) GetExecutionCache(fingerPrint, pipelineName, namespace string) (string, error) { - fingerPrintPredicate := &api.Predicate{ - Op: api.Predicate_EQUALS, - Key: "fingerprint", - Value: &api.Predicate_StringValue{StringValue: fingerPrint}, - } - pipelineNamePredicate := &api.Predicate{ - Op: api.Predicate_EQUALS, - Key: "pipelineName", - Value: &api.Predicate_StringValue{StringValue: pipelineName}, - } - namespacePredicate := &api.Predicate{ - Op: api.Predicate_EQUALS, - Key: "namespace", - Value: &api.Predicate_StringValue{StringValue: namespace}, - } - filter := api.Filter{Predicates: []*api.Predicate{fingerPrintPredicate, pipelineNamePredicate, namespacePredicate}} - - taskFilterJson, err := protojson.Marshal(&filter) - if err != nil { - return "", fmt.Errorf("failed to convert filter into JSON: %w", err) - } - listTasksReuqest := &api.ListTasksRequest{Filter: string(taskFilterJson), SortBy: "created_at desc", PageSize: 1} - listTasksResponse, err := c.svc.ListTasksV1(context.Background(), listTasksReuqest) - if err != nil { - return "", fmt.Errorf("failed to list tasks: %w", err) - } - tasks := listTasksResponse.Tasks - if len(tasks) == 0 { - return "", nil - } else { - return tasks[0].GetMlmdExecutionID(), nil - } -} - -func (c *client) CreateExecutionCache(ctx context.Context, task *api.Task) error { - req := &api.CreateTaskRequest{ - Task: task, - } - _, err := c.svc.CreateTaskV1(ctx, req) - if err != nil { - return fmt.Errorf("failed to create task: %w", err) - } - return nil -} - -func (c *client) GenerateFingerPrint(cacheKey *cachekey.CacheKey) (string, error) { +func GenerateFingerPrint(cacheKey *cachekey.CacheKey) (string, error) { cacheKeyJsonBytes, err := protojson.Marshal(cacheKey) if err != nil { return "", fmt.Errorf("failed to marshal cache key with protojson: %w", err) @@ -170,7 +35,7 @@ func (c *client) GenerateFingerPrint(cacheKey *cachekey.CacheKey) (string, error return executionHashKey, nil } -func (c *client) GenerateCacheKey( +func GenerateCacheKey( inputs *pipelinespec.ExecutorInput_Inputs, outputs *pipelinespec.ExecutorInput_Outputs, outputParametersTypeMap map[string]string, diff --git a/backend/src/v2/cacheutils/cache_test.go b/backend/src/v2/cacheutils/cache_test.go index 9786a369a42..eb15f1bf02f 100644 --- a/backend/src/v2/cacheutils/cache_test.go +++ b/backend/src/v2/cacheutils/cache_test.go @@ -1,7 +1,6 @@ package cacheutils import ( - "crypto/tls" "encoding/json" "fmt" "testing" @@ -218,11 +217,9 @@ func TestGenerateCacheKey(t *testing.T) { wantErr: false, }, } - cacheClient, err := NewClient(false, &tls.Config{}) - require.NoError(t, err) for _, test := range tests { t.Run(test.name, func(t *testing.T) { - got, err := cacheClient.GenerateCacheKey(test.executorInputInputs, test.executorInputOutputs, test.outputParametersTypeMap, test.cmdArgs, test.image, test.pvcNames) + got, err := 
GenerateCacheKey(test.executorInputInputs, test.executorInputOutputs, test.outputParametersTypeMap, test.cmdArgs, test.image, test.pvcNames) if (err != nil) != test.wantErr { t.Errorf("GenerateCacheKey() error = %v", err) return @@ -339,13 +336,11 @@ func TestGenerateFingerPrint(t *testing.T) { fingerPrint: "3d9a2a778fa3174c6cfc6e639c507c265b5f21ef6e5b1dd70b236462cc6da464", }, } - cacheClient, err := NewClient(false, &tls.Config{}) - require.NoError(t, err) for _, test := range tests { t.Run(test.name, func(t *testing.T) { - fingerPrint, err := cacheClient.GenerateFingerPrint(cacheKey) + fingerPrint, err := GenerateFingerPrint(cacheKey) assert.Nil(t, err) - testFingerPrint, err := cacheClient.GenerateFingerPrint(test.cacheKey) + testFingerPrint, err := GenerateFingerPrint(test.cacheKey) assert.Nil(t, err) assert.Equal(t, fingerPrint == testFingerPrint, test.wantEqual) assert.Equal(t, test.fingerPrint, testFingerPrint) @@ -409,16 +404,13 @@ func TestGenerateFingerPrint_ConsidersPVCNames(t *testing.T) { }, } - cacheClient, err := NewClient(false, &tls.Config{}) - require.NoError(t, err) - - baseFP, err := cacheClient.GenerateFingerPrint(base) + baseFP, err := GenerateFingerPrint(base) require.NoError(t, err) - withPVCsFP, err := cacheClient.GenerateFingerPrint(withPVCs) + withPVCsFP, err := GenerateFingerPrint(withPVCs) require.NoError(t, err) - samePVCsFP, err := cacheClient.GenerateFingerPrint(samePVCs) + samePVCsFP, err := GenerateFingerPrint(samePVCs) require.NoError(t, err) - differentPVCsFP, err := cacheClient.GenerateFingerPrint(differentPVCs) + differentPVCsFP, err := GenerateFingerPrint(differentPVCs) require.NoError(t, err) // PVC names should affect the fingerprint when present diff --git a/backend/src/v2/client_manager/client_manager.go b/backend/src/v2/client_manager/client_manager.go index 9146cfe5440..f7a92a17d40 100644 --- a/backend/src/v2/client_manager/client_manager.go +++ b/backend/src/v2/client_manager/client_manager.go @@ -5,17 +5,14 @@ import ( "fmt" "github.com/kubeflow/pipelines/backend/src/common/util" - - "github.com/kubeflow/pipelines/backend/src/v2/cacheutils" - "github.com/kubeflow/pipelines/backend/src/v2/metadata" + "github.com/kubeflow/pipelines/backend/src/v2/apiclient" + "github.com/kubeflow/pipelines/backend/src/v2/apiclient/kfpapi" "k8s.io/client-go/kubernetes" - "k8s.io/client-go/rest" ) type ClientManagerInterface interface { K8sClient() kubernetes.Interface - MetadataClient() metadata.ClientInterface - CacheClient() cacheutils.Client + KFPAPIClient() kfpapi.API } // Ensure ClientManager implements ClientManagerInterface @@ -23,17 +20,13 @@ var _ ClientManagerInterface = (*ClientManager)(nil) // ClientManager is a container for various service clients. type ClientManager struct { - k8sClient kubernetes.Interface - metadataClient metadata.ClientInterface - cacheClient cacheutils.Client + k8sClient kubernetes.Interface + kfpAPIClient kfpapi.API } type Options struct { - MLMDServerAddress string - MLMDServerPort string - CacheDisabled bool - CaCertPath string - MLMDTLSEnabled bool + MLPipelineTLSEnabled bool + CaCertPath string } // NewClientManager creates and Init a new instance of ClientManager. 
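
// Editor's note (not part of this patch): with the trimmed-down Options above, a
// driver or launcher constructs the manager and reaches the API roughly like so
// (field values illustrative):
//
//	cm, err := client_manager.NewClientManager(&client_manager.Options{
//		MLPipelineTLSEnabled: false,
//		CaCertPath:           "",
//	})
//	if err != nil { /* handle */ }
//	api := cm.KFPAPIClient() // kfpapi.API backed by the v2beta1 services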
@@ -51,18 +44,14 @@ func (cm *ClientManager) K8sClient() kubernetes.Interface { return cm.k8sClient } -func (cm *ClientManager) MetadataClient() metadata.ClientInterface { - return cm.metadataClient -} - -func (cm *ClientManager) CacheClient() cacheutils.Client { - return cm.cacheClient +func (cm *ClientManager) KFPAPIClient() kfpapi.API { + return cm.kfpAPIClient } func (cm *ClientManager) init(opts *Options) error { var tlsCfg *tls.Config var err error - if opts.MLMDTLSEnabled { + if opts.MLPipelineTLSEnabled { tlsCfg, err = util.GetTLSConfig(opts.CaCertPath) if err != nil { return err @@ -72,24 +61,23 @@ func (cm *ClientManager) init(opts *Options) error { if err != nil { return err } - metadataClient, err := initMetadataClient(opts.MLMDServerAddress, opts.MLMDServerPort, tlsCfg) - if err != nil { - return err - } - cacheClient, err := initCacheClient(opts.CacheDisabled, tlsCfg) - if err != nil { - return err - } cm.k8sClient = k8sClient - cm.metadataClient = metadataClient - cm.cacheClient = cacheClient + + // Initialize connection to new KFP v2beta1 API server + apiCfg := apiclient.FromEnv() + kfpAPIClient, apiErr := apiclient.New(apiCfg, tlsCfg) + if apiErr != nil { + return fmt.Errorf("failed to init KFP API client: %w", apiErr) + } + var kfpAPI = kfpapi.New(kfpAPIClient) + cm.kfpAPIClient = kfpAPI return nil } func initK8sClient() (kubernetes.Interface, error) { - restConfig, err := rest.InClusterConfig() + restConfig, err := util.GetKubernetesConfig() if err != nil { - return nil, fmt.Errorf("failed to initialize kubernetes client: %w", err) + return nil, err } k8sClient, err := kubernetes.NewForConfig(restConfig) if err != nil { @@ -97,11 +85,3 @@ func initK8sClient() (kubernetes.Interface, error) { } return k8sClient, nil } - -func initMetadataClient(address string, port string, tlsCfg *tls.Config) (metadata.ClientInterface, error) { - return metadata.NewClient(address, port, tlsCfg) -} - -func initCacheClient(cacheDisabled bool, tlsCfg *tls.Config) (cacheutils.Client, error) { - return cacheutils.NewClient(cacheDisabled, tlsCfg) -} diff --git a/backend/src/v2/client_manager/client_manager_fake.go b/backend/src/v2/client_manager/client_manager_fake.go index 918e1f72ac5..65b1b4cd5f0 100644 --- a/backend/src/v2/client_manager/client_manager_fake.go +++ b/backend/src/v2/client_manager/client_manager_fake.go @@ -1,15 +1,13 @@ package client_manager import ( - "github.com/kubeflow/pipelines/backend/src/v2/cacheutils" - "github.com/kubeflow/pipelines/backend/src/v2/metadata" + "github.com/kubeflow/pipelines/backend/src/v2/apiclient/kfpapi" "k8s.io/client-go/kubernetes" ) type FakeClientManager struct { - k8sClient kubernetes.Interface - metadataClient metadata.ClientInterface - cacheClient cacheutils.Client + k8sClient kubernetes.Interface + kfpAPI kfpapi.API } // Ensure FakeClientManager implements ClientManagerInterface @@ -19,18 +17,13 @@ func (f *FakeClientManager) K8sClient() kubernetes.Interface { return f.k8sClient } -func (f *FakeClientManager) MetadataClient() metadata.ClientInterface { - return f.metadataClient +func (f *FakeClientManager) KFPAPIClient() kfpapi.API { + return f.kfpAPI } -func (f *FakeClientManager) CacheClient() cacheutils.Client { - return f.cacheClient -} - -func NewFakeClientManager(k8sClient kubernetes.Interface, metadataClient metadata.ClientInterface, cacheClient cacheutils.Client) *FakeClientManager { +func NewFakeClientManager(k8sClient kubernetes.Interface, kfpAPI kfpapi.API) *FakeClientManager { return &FakeClientManager{ - k8sClient: k8sClient, - 
metadataClient: metadataClient, - cacheClient: cacheClient, + k8sClient: k8sClient, + kfpAPI: kfpAPI, } } diff --git a/backend/src/v2/cmd/driver/execution_paths.go b/backend/src/v2/cmd/driver/execution_paths.go index 584d29065d5..b3680ad70fc 100644 --- a/backend/src/v2/cmd/driver/execution_paths.go +++ b/backend/src/v2/cmd/driver/execution_paths.go @@ -1,7 +1,7 @@ package main -type ExecutionPaths struct { - ExecutionID string +type TaskPaths struct { + TaskID string IterationCount string CachedDecision string Condition string diff --git a/backend/src/v2/cmd/driver/main.go b/backend/src/v2/cmd/driver/main.go index 69149a2a0ea..a528f124fe0 100644 --- a/backend/src/v2/cmd/driver/main.go +++ b/backend/src/v2/cmd/driver/main.go @@ -16,15 +16,17 @@ package main import ( "bytes" "context" - "crypto/tls" "encoding/json" "flag" "fmt" - "google.golang.org/protobuf/encoding/protojson" - + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" "github.com/kubeflow/pipelines/backend/src/apiserver/config/proxy" "github.com/kubeflow/pipelines/backend/src/common/util" + "github.com/kubeflow/pipelines/backend/src/v2/apiclient/kfpapi" + "github.com/kubeflow/pipelines/backend/src/v2/client_manager" + "github.com/kubeflow/pipelines/backend/src/v2/driver/common" + "google.golang.org/protobuf/encoding/protojson" "os" "path/filepath" @@ -32,10 +34,7 @@ import ( "github.com/golang/glog" "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" - "github.com/kubeflow/pipelines/backend/src/v2/cacheutils" - "github.com/kubeflow/pipelines/backend/src/v2/config" "github.com/kubeflow/pipelines/backend/src/v2/driver" - "github.com/kubeflow/pipelines/backend/src/v2/metadata" "github.com/kubeflow/pipelines/kubernetes_platform/go/kubernetesplatform" ) @@ -64,16 +63,12 @@ var ( taskName = flag.String("task_name", "", "original task name, used for proper input resolution in the container/dag driver") // container inputs - dagExecutionID = flag.Int64("dag_execution_id", 0, "DAG execution ID") + parentTaskID = flag.String("parent_task_id", "", "Parent PipelineTask ID") containerSpecJson = flag.String("container", "{}", "container spec") k8sExecConfigJson = flag.String("kubernetes_config", "{}", "kubernetes executor config") - // config - mlmdServerAddress = flag.String("mlmd_server_address", "", "MLMD server address") - mlmdServerPort = flag.String("mlmd_server_port", "", "MLMD server port") - // output paths - executionIDPath = flag.String("execution_id_path", "", "Exeucution ID output path") + parentTaskIDPath = flag.String("parent_task_id_path", "", "Parent Task ID output path") iterationCountPath = flag.String("iteration_count_path", "", "Iteration Count output path") podSpecPatchPath = flag.String("pod_spec_patch_path", "", "Pod Spec Patch output path") // the value stored in the paths will be either 'true' or 'false' @@ -88,12 +83,9 @@ var ( publishLogs = flag.String("publish_logs", "true", "Whether to publish component logs to the object store") cacheDisabledFlag = flag.Bool("cache_disabled", false, "Disable cache globally.") mlPipelineTLSEnabled = flag.Bool("ml_pipeline_tls_enabled", false, "Set to true if mlpipeline API server serves over TLS.") - metadataTLSEnabled = flag.Bool("metadata_tls_enabled", false, "Set to true if MLMD serves over TLS.") caCertPath = flag.String("ca_cert_path", "", "The path to the CA certificate to trust on connections to the ML pipeline API server and metadata server.") ) -// func RootDAG(pipelineName string, runID string, component *pipelinespec.ComponentSpec, task 
*pipelinespec.PipelineTaskSpec, mlmd *metadata.Client) (*Execution, error) { - func main() { flag.Parse() @@ -105,7 +97,7 @@ func main() { err = drive() if err != nil { - glog.Exitf("%v", err) + glog.Exitf("Failed to execute driver: %v", err) } } @@ -134,12 +126,19 @@ func validate() error { } func drive() (err error) { - defer func() { - if err != nil { - err = fmt.Errorf("KFP driver: %w", err) - } - }() ctx := context.Background() + + // Initialize connection to the KFP API server + clientManagerOptions := &client_manager.Options{ + MLPipelineTLSEnabled: *mlPipelineTLSEnabled, + CaCertPath: *caCertPath, + } + clientManager, err := client_manager.NewClientManager(clientManagerOptions) + if err != nil { + return err + } + glog.Infof("Initialized Client Manager.") + if err = validate(); err != nil { return err } @@ -150,6 +149,7 @@ func drive() (err error) { if err := util.UnmarshalString(*componentSpecJson, componentSpec); err != nil { return fmt.Errorf("failed to unmarshal component spec, error: %w\ncomponentSpec: %v", err, prettyPrint(*componentSpecJson)) } + var taskSpec *pipelinespec.PipelineTaskSpec if *taskSpecJson != "" { glog.Infof("input TaskSpec:%s\n", prettyPrint(*taskSpecJson)) @@ -158,11 +158,13 @@ func drive() (err error) { return fmt.Errorf("failed to unmarshal task spec, error: %w\ntask: %v", err, taskSpecJson) } } + glog.Infof("input ContainerSpec:%s\n", prettyPrint(*containerSpecJson)) containerSpec := &pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec{} if err := util.UnmarshalString(*containerSpecJson, containerSpec); err != nil { return fmt.Errorf("failed to unmarshal container spec, error: %w\ncontainerSpec: %v", err, containerSpecJson) } + var runtimeConfig *pipelinespec.PipelineJob_RuntimeConfig if *runtimeConfigJson != "" { glog.Infof("input RuntimeConfig:%s\n", prettyPrint(*runtimeConfigJson)) @@ -171,79 +173,102 @@ func drive() (err error) { return fmt.Errorf("failed to unmarshal runtime config, error: %w\nruntimeConfig: %v", err, runtimeConfigJson) } } + k8sExecCfg, err := parseExecConfigJson(k8sExecConfigJson) if err != nil { return err } - namespace, err := config.InPodNamespace() + + namespace := os.Getenv("NAMESPACE") + if namespace == "" { + return fmt.Errorf("NAMESPACE environment variable must be set") + } + + podName := os.Getenv("KFP_POD_NAME") + podUID := os.Getenv("KFP_POD_UID") + if podUID == "" || podName == "" { + return fmt.Errorf("KFP_POD_UID and KFP_POD_NAME environment variables must be set") + } + + if runID == nil { + return fmt.Errorf("argument --run_id must be specified") + } + fullView := go_client.GetRunRequest_FULL + run, err := clientManager.KFPAPIClient().GetRun(ctx, &go_client.GetRunRequest{RunId: *runID, View: &fullView}) if err != nil { return err } - var tlsCfg *tls.Config - if *metadataTLSEnabled { - tlsCfg, err = util.GetTLSConfig(*caCertPath) + + var parentTask *go_client.PipelineTaskDetail + if parentTaskID != nil && *parentTaskID != "" { + parentTask, err = clientManager.KFPAPIClient().GetTask(ctx, &go_client.GetTaskRequest{TaskId: *parentTaskID}) if err != nil { return err } } - client, err := newMlmdClient(tlsCfg) - if err != nil { - return err + + // Argo Compiler does not always pass task name, so we infer it from the task spec. + // In the future we should require the task name to be passed explicitly. + // This will allow us to remove the need for a taskspec and component spec to be + // passed into the driver (we can infer it from the scope path and taskname). 
+ var resolvedTaskName string
+ if *driverType != ROOT_DAG {
+ if *taskName != "" {
+ resolvedTaskName = *taskName
+ } else {
+ return fmt.Errorf("task name for non-root DAG could not be resolved")
+ }
}
- cacheClient, err := cacheutils.NewClient(*cacheDisabledFlag, tlsCfg)
- if err != nil {
- return err
+
+ scopePath, err := buildScopePath(ctx, run, parentTask, resolvedTaskName, clientManager.KFPAPIClient())
+ if err != nil {
+ return fmt.Errorf("failed to build scope path: %w", err)
+ }
+ if scopePath == nil {
+ return fmt.Errorf("failed to build scope path: scope path is nil")
}
- options := driver.Options{
+
+ options := common.Options{
PipelineName: *pipelineName,
- RunID: *runID,
+ Run: run,
RunName: *runName,
RunDisplayName: *runDisplayName,
Namespace: namespace,
Component: componentSpec,
Task: taskSpec,
- DAGExecutionID: *dagExecutionID,
IterationIndex: *iterationIndex,
PipelineLogLevel: *logLevel,
PublishLogs: *publishLogs,
CacheDisabled: *cacheDisabledFlag,
DriverType: *driverType,
- TaskName: *taskName,
- MLMDServerAddress: *mlmdServerAddress,
- MLMDServerPort: *mlmdServerPort,
+ TaskName: resolvedTaskName,
+ ParentTask: parentTask,
+ PodName: podName,
+ PodUID: podUID,
+ ScopePath: *scopePath,
MLPipelineTLSEnabled: *mlPipelineTLSEnabled,
- MLMDTLSEnabled: *metadataTLSEnabled,
CaCertPath: *caCertPath,
}
var execution *driver.Execution
- var driverErr error
switch *driverType {
case ROOT_DAG:
options.RuntimeConfig = runtimeConfig
- execution, driverErr = driver.RootDAG(ctx, options, client)
+ execution, err = driver.RootDAG(ctx, options, clientManager)
case DAG:
- execution, driverErr = driver.DAG(ctx, options, client)
+ execution, err = driver.DAG(ctx, options, clientManager)
case CONTAINER:
options.Container = containerSpec
options.KubernetesExecutorConfig = k8sExecCfg
- execution, driverErr = driver.Container(ctx, options, client, cacheClient)
+ execution, err = driver.Container(ctx, options, clientManager)
default:
err = fmt.Errorf("unknown driverType %s", *driverType)
}
- if driverErr != nil {
- if execution == nil {
- return driverErr
- }
- defer func() {
- // Override error with driver error, because driver error is more important.
- // However, we continue running, because the following code prints debug info that
- // may be helpful for figuring out why this failed.
- err = driverErr
- }()
+ if err != nil {
+ return fmt.Errorf("failed to execute driver: %w", err)
+ }
+ if execution == nil {
+ return fmt.Errorf("driver execution is nil")
}
- executionPaths := &ExecutionPaths{
- ExecutionID: *executionIDPath,
+ executionPaths := &TaskPaths{
+ TaskID: *parentTaskIDPath,
IterationCount: *iterationCountPath,
CachedDecision: *cachedDecisionPath,
Condition: *conditionPath,
@@ -265,15 +290,17 @@ func parseExecConfigJson(k8sExecConfigJson *string) (*kubernetesplatform.Kuberne
return k8sExecCfg, nil
}
-func handleExecution(execution *driver.Execution, driverType string, executionPaths *ExecutionPaths) error {
- if execution.ID != 0 {
- glog.Infof("output execution.ID=%v", execution.ID)
- if executionPaths.ExecutionID != "" {
- if err := writeFile(executionPaths.ExecutionID, []byte(fmt.Sprint(execution.ID))); err != nil {
- return fmt.Errorf("failed to write execution ID to file: %w", err)
- }
+func handleExecution(execution *driver.Execution, driverType string, executionPaths *TaskPaths) error {
+ if execution.TaskID == "" {
+ return fmt.Errorf("execution.TaskID is empty")
+ }
+ glog.Infof("output execution.TaskID=%v", execution.TaskID)
+ if executionPaths.TaskID != "" {
+ if err := writeFile(executionPaths.TaskID, []byte(fmt.Sprint(execution.TaskID))); err != nil {
+ return fmt.Errorf("failed to write task ID to file: %w", err)
}
}
+
if execution.IterationCount != nil {
if err := writeFile(executionPaths.IterationCount, []byte(fmt.Sprintf("%v", *execution.IterationCount))); err != nil {
return fmt.Errorf("failed to write iteration count to file: %w", err)
@@ -346,11 +373,42 @@ func writeFile(path string, data []byte) (err error) {
return os.WriteFile(path, data, 0o644)
}
-func newMlmdClient(tlsCfg *tls.Config) (*metadata.Client, error) {
- mlmdConfig := metadata.DefaultConfig()
- if *mlmdServerAddress != "" && *mlmdServerPort != "" {
- mlmdConfig.Address = *mlmdServerAddress
- mlmdConfig.Port = *mlmdServerPort
+// buildScopePath builds a ScopePath from the run, parentTask and taskName.
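+// For the ROOT_DAG driver the path is seeded from the pipeline spec and pushed
+// to "root"; for all other driver types the parent task's stored scope path is
+// extended with this task's name via util.ScopePathFromStringPathWithNewTask.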
+func buildScopePath( + ctx context.Context, + run *go_client.Run, + parentTask *go_client.PipelineTaskDetail, + taskName string, + kfpAPI kfpapi.API) (*util.ScopePath, error) { + pipelineSpecStruct, err := kfpAPI.FetchPipelineSpecFromRun(ctx, run) + if err != nil { + return nil, err + } + var scopePath util.ScopePath + if driverType == nil { + return nil, fmt.Errorf("argument --%s must be specified", driverTypeArg) + } + if *driverType == ROOT_DAG { + scopePath, err = util.NewScopePathFromStruct(pipelineSpecStruct) + if err != nil { + return nil, err + } + err = scopePath.Push("root") + if err != nil { + return nil, err + } + } else { + if taskName == "" { + return nil, fmt.Errorf("task name must be specified for non-root drivers") + } + scopePath, err = util.ScopePathFromStringPathWithNewTask( + pipelineSpecStruct, + parentTask.GetScopePath(), + taskName, + ) + if err != nil { + return nil, err + } } - return metadata.NewClient(mlmdConfig.Address, mlmdConfig.Port, tlsCfg) + return &scopePath, nil } diff --git a/backend/src/v2/cmd/driver/main_test.go b/backend/src/v2/cmd/driver/main_test.go index ad15a7b00e9..5f0e6207a16 100644 --- a/backend/src/v2/cmd/driver/main_test.go +++ b/backend/src/v2/cmd/driver/main_test.go @@ -52,9 +52,11 @@ func TestSpecParsing(t *testing.T) { } func Test_handleExecutionContainer(t *testing.T) { - execution := &driver.Execution{} + execution := &driver.Execution{ + TaskID: "test-task-id", + } - executionPaths := &ExecutionPaths{ + executionPaths := &TaskPaths{ Condition: "condition.txt", } @@ -70,9 +72,11 @@ func Test_handleExecutionContainer(t *testing.T) { } func Test_handleExecutionRootDAG(t *testing.T) { - execution := &driver.Execution{} + execution := &driver.Execution{ + TaskID: "test-task-id", + } - executionPaths := &ExecutionPaths{ + executionPaths := &TaskPaths{ IterationCount: "iteration_count.txt", Condition: "condition.txt", } @@ -89,9 +93,9 @@ func Test_handleExecutionRootDAG(t *testing.T) { cleanup(t, executionPaths) } -func cleanup(t *testing.T, executionPaths *ExecutionPaths) { +func cleanup(t *testing.T, executionPaths *TaskPaths) { removeIfExists(t, executionPaths.IterationCount) - removeIfExists(t, executionPaths.ExecutionID) + removeIfExists(t, executionPaths.TaskID) removeIfExists(t, executionPaths.Condition) removeIfExists(t, executionPaths.PodSpecPatch) removeIfExists(t, executionPaths.CachedDecision) diff --git a/backend/src/v2/cmd/launcher-v2/main.go b/backend/src/v2/cmd/launcher-v2/main.go index a793378b4d6..786d400c65e 100644 --- a/backend/src/v2/cmd/launcher-v2/main.go +++ b/backend/src/v2/cmd/launcher-v2/main.go @@ -19,35 +19,36 @@ import ( "context" "flag" "fmt" + "os" "github.com/golang/glog" + "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" + "github.com/kubeflow/pipelines/backend/src/common/util" "github.com/kubeflow/pipelines/backend/src/v2/client_manager" "github.com/kubeflow/pipelines/backend/src/v2/component" - "github.com/kubeflow/pipelines/backend/src/v2/config" + "google.golang.org/protobuf/encoding/protojson" ) -// TODO: use https://github.com/spf13/cobra as a framework to create more complex CLI tools with subcommands. 
 var (
 	copy              = flag.String("copy", "", "copy this binary to specified destination path")
 	pipelineName      = flag.String("pipeline_name", "", "pipeline context name")
 	runID             = flag.String("run_id", "", "pipeline run uid")
-	parentDagID       = flag.Int64("parent_dag_id", 0, "parent DAG execution ID")
+	taskID            = flag.String("task_id", "", "pipeline task id (PipelineTaskDetail.task_id)")
+	parentTaskID      = flag.String("parent_task_id", "", "Parent PipelineTask ID")
 	executorType      = flag.String("executor_type", "container", "The type of the ExecutorSpec")
-	executionID       = flag.Int64("execution_id", 0, "Execution ID of this task.")
 	executorInputJSON = flag.String("executor_input", "", "The JSON-encoded ExecutorInput.")
-	componentSpecJSON = flag.String("component_spec", "", "The JSON-encoded ComponentSpec.")
+	taskName          = flag.String("task_name", "", "The name of the task.")
 	importerSpecJSON  = flag.String("importer_spec", "", "The JSON-encoded ImporterSpec.")
-	taskSpecJSON      = flag.String("task_spec", "", "The JSON-encoded TaskSpec.")
 	podName           = flag.String("pod_name", "", "Kubernetes Pod name.")
 	podUID            = flag.String("pod_uid", "", "Kubernetes Pod UID.")
-	mlmdServerAddress = flag.String("mlmd_server_address", "", "The MLMD gRPC server address.")
-	mlmdServerPort    = flag.String("mlmd_server_port", "8080", "The MLMD gRPC server port.")
 	logLevel          = flag.String("log_level", "1", "The verbosity level to log.")
 	publishLogs       = flag.String("publish_logs", "true", "Whether to publish component logs to the object store")
 	cacheDisabledFlag = flag.Bool("cache_disabled", false, "Disable cache globally.")
+	fingerPrint       = flag.String("fingerprint", "", "The fingerprint of the pipeline executor.")
+	iterationIndex    = flag.Int("iteration_index", -1, "iteration index, -1 means not an iteration")
 	caCertPath        = flag.String("ca_cert_path", "", "The path to the CA certificate to trust on connections to the ML pipeline API server and metadata server.")
 	mlPipelineTLSEnabled = flag.Bool("ml_pipeline_tls_enabled", false, "Set to true if mlpipeline API server serves over TLS.")
-	metadataTLSEnabled   = flag.Bool("metadata_tls_enabled", false, "Set to true if MLMD serves over TLS.")
 )
 
 func main() {
@@ -73,62 +74,123 @@ func run() error {
 		// early return
 		component.CopyThisBinary(*copy)
 	}
-	namespace, err := config.InPodNamespace()
+	namespace := os.Getenv("NAMESPACE")
+	if namespace == "" {
+		return fmt.Errorf("NAMESPACE environment variable must be set")
+	}
+
+	// Create a client manager
+	clientOptions := &client_manager.Options{
+		MLPipelineTLSEnabled: *mlPipelineTLSEnabled,
+		CaCertPath:           *caCertPath,
+	}
+
+	clientManager, err := client_manager.NewClientManager(clientOptions)
+	if err != nil {
+		return fmt.Errorf("failed to create client manager: %w", err)
+	}
+
+	// Fetch Run
+	kfpAPI := clientManager.KFPAPIClient()
+	fullView := go_client.GetRunRequest_FULL
+	pipelineRun, err := kfpAPI.GetRun(ctx, &go_client.GetRunRequest{RunId: *runID, View: &fullView})
+	if err != nil {
+		return fmt.Errorf("failed to get run: %w", err)
+	}
+
+	// Fetch Parent Task
+	if parentTaskID == nil || *parentTaskID == "" {
+		return fmt.Errorf("parent task id is nil or empty")
+	}
+	parentTask, err := kfpAPI.GetTask(ctx, &go_client.GetTaskRequest{TaskId: *parentTaskID})
+	if err != nil {
+		return fmt.Errorf("failed to get parent task: %w", err)
+	}
+
+	// Build scope path
+	pipelineSpecStruct, err := kfpAPI.FetchPipelineSpecFromRun(ctx, pipelineRun)
 	if err != nil {
 		return err
 	}
+	var scopePath util.ScopePath
+	scopePath, err = util.ScopePathFromStringPathWithNewTask(
+		pipelineSpecStruct,
+		parentTask.GetScopePath(),
+		*taskName,
+	)
+	if err != nil {
+		return fmt.Errorf("failed to build scope path: %w", err)
+	}
+
+	componentSpec := scopePath.GetLast().GetComponentSpec()
+	taskSpec := scopePath.GetLast().GetTaskSpec()
 
 	launcherV2Opts := &component.LauncherV2Options{
-		Namespace:            namespace,
-		PodName:              *podName,
-		PodUID:               *podUID,
-		MLMDServerAddress:    *mlmdServerAddress,
-		MLMDServerPort:       *mlmdServerPort,
-		PipelineName:         *pipelineName,
-		RunID:                *runID,
-		PublishLogs:          *publishLogs,
-		CacheDisabled:        *cacheDisabledFlag,
-		MLPipelineTLSEnabled: *mlPipelineTLSEnabled,
-		MLMDTLSEnabled:       *metadataTLSEnabled,
-		CaCertPath:           *caCertPath,
+		Namespace:         namespace,
+		PodName:           *podName,
+		PodUID:            *podUID,
+		PipelineName:      *pipelineName,
+		Run:               pipelineRun,
+		ParentTask:        parentTask,
+		PublishLogs:       *publishLogs,
+		CacheDisabled:     *cacheDisabledFlag,
+		CachedFingerprint: *fingerPrint,
+		ComponentSpec:     componentSpec,
+		TaskSpec:          taskSpec,
+		ScopePath:         scopePath,
+		PipelineSpec:      pipelineSpecStruct,
+	}
+
+	if iterationIndex != nil && *iterationIndex > -1 {
+		launcherV2Opts.IterationIndex = util.Int64Pointer(int64(*iterationIndex))
 	}
 
 	switch *executorType {
 	case "importer":
-		importerLauncherOpts := &component.ImporterLauncherOptions{
-			PipelineName: *pipelineName,
-			RunID:        *runID,
-			ParentDagID:  *parentDagID,
+		if importerSpecJSON == nil || *importerSpecJSON == "" {
+			return fmt.Errorf("importer spec is nil or empty")
+		}
+		importerSpec := &pipelinespec.PipelineDeploymentConfig_ImporterSpec{}
+		err = protojson.Unmarshal([]byte(*importerSpecJSON), importerSpec)
+		if err != nil {
+			return fmt.Errorf("failed to unmarshal importer spec: %w", err)
 		}
-		importerLauncher, err := component.NewImporterLauncher(ctx, *componentSpecJSON, *importerSpecJSON, *taskSpecJSON, launcherV2Opts, importerLauncherOpts)
+		launcherV2Opts.ImporterSpec = importerSpec
+		importerLauncher, err := component.NewImporterLauncher(
+			launcherV2Opts,
+			clientManager,
+		)
 		if err != nil {
-			return err
+			return fmt.Errorf("failed to create importer launcher: %w", err)
 		}
 		if err := importerLauncher.Execute(ctx); err != nil {
-			return err
+			return fmt.Errorf("failed to execute importer launcher: %w", err)
 		}
 		return nil
 	case "container":
-		clientOptions := &client_manager.Options{
-			MLMDServerAddress: launcherV2Opts.MLMDServerAddress,
-			MLMDServerPort:    launcherV2Opts.MLMDServerPort,
-			CacheDisabled:     launcherV2Opts.CacheDisabled,
-			MLMDTLSEnabled:    launcherV2Opts.MLMDTLSEnabled,
-			CaCertPath:        launcherV2Opts.CaCertPath,
+		// Container task should have a pre-existing task created by the Driver
+		if taskID != nil && *taskID != "" {
+			task, err := kfpAPI.GetTask(ctx, &go_client.GetTaskRequest{TaskId: *taskID})
+			if err != nil {
+				return fmt.Errorf("failed to get task: %w", err)
+			}
+			launcherV2Opts.Task = task
+		} else {
+			return fmt.Errorf("task id is nil or empty")
 		}
-		clientManager, err := client_manager.NewClientManager(clientOptions)
+		launcher, err := component.NewLauncherV2(
+			*executorInputJSON,
+			flag.Args(),
+			launcherV2Opts,
+			clientManager,
+		)
 		if err != nil {
-			return err
-		}
-		launcher, err := component.NewLauncherV2(ctx, *executionID, *executorInputJSON, *componentSpecJSON, flag.Args(), launcherV2Opts, clientManager)
-		if err != nil {
-			return err
+			return fmt.Errorf("failed to create launcher: %w", err)
 		}
 		glog.V(5).Info(launcher.Info())
 		if err := launcher.Execute(ctx); err != nil {
-			return err
+			return fmt.Errorf("failed to execute launcher: %w", err)
 		}
-
 		return nil
 	}
diff --git a/backend/src/v2/compiler/argocompiler/argo.go b/backend/src/v2/compiler/argocompiler/argo.go
index 55e80c03914..84aaa0c1103 100644
--- a/backend/src/v2/compiler/argocompiler/argo.go
+++ b/backend/src/v2/compiler/argocompiler/argo.go
@@ -379,11 +379,10 @@ const (
 	paramContainer           = "container"      // container spec
 	paramImporter            = "importer"       // importer spec
 	paramRuntimeConfig       = "runtime-config" // job runtime config, pipeline level inputs
-	paramParentDagID         = "parent-dag-id"
-	paramExecutionID         = "execution-id"
+	paramParentDagTaskID     = "parent-dag-task-id"
+	paramParentDagTaskIDPath = "parent-dag-task-id-path"
 	paramIterationCount      = "iteration-count"
 	paramIterationIndex      = "iteration-index"
-	paramExecutorInput       = "executor-input"
 	paramDriverType          = "driver-type"
 	paramCachedDecision      = "cached-decision" // indicate hit cache or not
 	paramPodSpecPatch        = "pod-spec-patch"  // a strategic patch merged with the pod spec
diff --git a/backend/src/v2/compiler/argocompiler/common.go b/backend/src/v2/compiler/argocompiler/common.go
index bc2fa192aea..c0a5a2b5372 100644
--- a/backend/src/v2/compiler/argocompiler/common.go
+++ b/backend/src/v2/compiler/argocompiler/common.go
@@ -37,21 +37,52 @@ var metadataEnvFrom = k8score.EnvFromSource{
 	},
 }
 
-var commonEnvs = []k8score.EnvVar{{
-	Name: "KFP_POD_NAME",
-	ValueFrom: &k8score.EnvVarSource{
-		FieldRef: &k8score.ObjectFieldSelector{
-			FieldPath: "metadata.name",
+// KFP service account token configuration for authentication with API server
+const (
+	// kfpTokenExpirationSeconds is the expiration time for the projected service account token.
+	// Set to 7200 seconds (2 hours) to provide enough buffer while kubelet auto-rotates tokens.
+	kfpTokenExpirationSeconds = 7200
+	// kfpTokenVolumeName is the name of the volume containing the KFP service account token
+	kfpTokenVolumeName = "kfp-launcher-token"
+	// kfpTokenMountPath is the path where the KFP token is mounted
+	kfpTokenMountPath = "/var/run/secrets/kfp"
+	// kfpTokenAudience is the audience for the projected service account token
+	kfpTokenAudience = "pipelines.kubeflow.org"
+)
+
+// kfpTokenExpirationSecondsPtr returns a pointer to the KFP token expiration seconds constant.
+// This is used for the ServiceAccountTokenProjection ExpirationSeconds field which requires *int64.
+func kfpTokenExpirationSecondsPtr() *int64 {
+	seconds := int64(kfpTokenExpirationSeconds)
+	return &seconds
+}
+
+var commonEnvs = []k8score.EnvVar{
+	{
+		Name: "KFP_POD_NAME",
+		ValueFrom: &k8score.EnvVarSource{
+			FieldRef: &k8score.ObjectFieldSelector{
+				FieldPath: "metadata.name",
+			},
 		},
 	},
-}, {
-	Name: "KFP_POD_UID",
-	ValueFrom: &k8score.EnvVarSource{
-		FieldRef: &k8score.ObjectFieldSelector{
-			FieldPath: "metadata.uid",
+	{
+		Name: "KFP_POD_UID",
+		ValueFrom: &k8score.EnvVarSource{
+			FieldRef: &k8score.ObjectFieldSelector{
+				FieldPath: "metadata.uid",
+			},
 		},
 	},
-}}
+	{
+		Name: "NAMESPACE",
+		ValueFrom: &k8score.EnvVarSource{
+			FieldRef: &k8score.ObjectFieldSelector{
+				FieldPath: "metadata.namespace",
+			},
+		},
+	},
+}
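Downstream, the driver and launcher authenticate to the API server with the token projected at kfpTokenMountPath. A minimal sketch of the consuming side, under the assumption that the token is simply read from disk and sent as a bearer credential (the actual client wiring lives in the client_manager package):

    // Sketch: reading the projected service account token. The kubelet
    // rotates the file in place, so callers should re-read it periodically
    // rather than cache it forever.
    package main

    import (
    	"fmt"
    	"os"
    	"path/filepath"
    	"strings"
    )

    const kfpTokenMountPath = "/var/run/secrets/kfp" // matches the constant above

    func loadKFPToken() (string, error) {
    	raw, err := os.ReadFile(filepath.Join(kfpTokenMountPath, "token"))
    	if err != nil {
    		return "", fmt.Errorf("failed to read projected KFP token: %w", err)
    	}
    	return strings.TrimSpace(string(raw)), nil
    }

    func main() {
    	token, err := loadKFPToken()
    	if err != nil {
    		fmt.Fprintln(os.Stderr, err)
    		os.Exit(1)
    	}
    	fmt.Println("token length:", len(token)) // never log the token itself
    }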
 
 // ConfigureCustomCABundle adds CABundle environment variables and volume mounts if CABUNDLE_SECRET_NAME is set.
 func ConfigureCustomCABundle(tmpl *wfapi.Template) {
@@ -91,7 +122,7 @@ func addExitTask(task *wfapi.DAGTask, exitTemplate string, parentDagID string) {
 		wfapi.ExitLifecycleEvent: wfapi.LifecycleHook{
 			Template: exitTemplate,
 			Arguments: wfapi.Arguments{Parameters: []wfapi.Parameter{
-				{Name: paramParentDagID, Value: wfapi.AnyStringPtr(parentDagID)},
+				{Name: paramParentDagTaskID, Value: wfapi.AnyStringPtr(parentDagID)},
 			}},
 		},
 	}
diff --git a/backend/src/v2/compiler/argocompiler/container.go b/backend/src/v2/compiler/argocompiler/container.go
index c65c5103f61..4e1f3b62b86 100644
--- a/backend/src/v2/compiler/argocompiler/container.go
+++ b/backend/src/v2/compiler/argocompiler/container.go
@@ -21,9 +21,8 @@ import (
 	"strconv"
 	"strings"
 
-	"google.golang.org/protobuf/encoding/protojson"
-
 	"github.com/kubeflow/pipelines/backend/src/apiserver/config/proxy"
+	"google.golang.org/protobuf/encoding/protojson"
 	"k8s.io/apimachinery/pkg/util/intstr"
 
 	wfapi "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1"
@@ -156,7 +155,7 @@ func (c *workflowCompiler) containerDriverTask(name string, inputs containerDriv
 			{Name: paramTask, Value: wfapi.AnyStringPtr(inputs.task)},
 			{Name: paramContainer, Value: wfapi.AnyStringPtr(inputs.container)},
 			{Name: paramTaskName, Value: wfapi.AnyStringPtr(inputs.taskName)},
-			{Name: paramParentDagID, Value: wfapi.AnyStringPtr(inputs.parentDagID)},
+			{Name: paramParentDagTaskID, Value: wfapi.AnyStringPtr(inputs.parentDagID)},
 		},
 	},
 }
@@ -193,7 +192,7 @@ func (c *workflowCompiler) addContainerDriverTemplate() string {
 		"--run_id", runID(),
 		"--run_name", runResourceName(),
 		"--run_display_name", c.job.DisplayName,
-		"--dag_execution_id", inputValue(paramParentDagID),
+		"--parent_task_id", inputValue(paramParentDagTaskID),
 		"--component", inputValue(paramComponent),
 		"--task", inputValue(paramTask),
 		"--task_name", inputValue(paramTaskName),
@@ -213,12 +212,9 @@ func (c *workflowCompiler) addContainerDriverTemplate() string {
 	if c.mlPipelineTLSEnabled {
 		args = append(args, "--ml_pipeline_tls_enabled")
 	}
-	if common.GetMetadataTLSEnabled() {
-		args = append(args, "--metadata_tls_enabled")
-	}
 	setCABundle := false
-	if common.GetCaBundleSecretName() != "" && (c.mlPipelineTLSEnabled || common.GetMetadataTLSEnabled()) {
+	if common.GetCaBundleSecretName() != "" && c.mlPipelineTLSEnabled {
 		args = append(args, "--ca_cert_path", common.TLSCertCAPath)
 		setCABundle = true
 	}
@@ -238,7 +234,7 @@ func (c *workflowCompiler) addContainerDriverTemplate() string {
 				{Name: paramTask},
 				{Name: paramContainer},
 				{Name: paramTaskName},
-				{Name: paramParentDagID},
+				{Name: paramParentDagTaskID},
 				{Name: paramIterationIndex, Default: wfapi.AnyStringPtr("-1")},
 				{Name: paramKubernetesConfig, Default: wfapi.AnyStringPtr("")},
 			},
@@ -255,7 +251,32 @@ func (c *workflowCompiler) addContainerDriverTemplate() string {
 			Command:   c.driverCommand,
 			Args:      args,
 			Resources: driverResources,
-			Env:       proxy.GetConfig().GetEnvVars(),
+			Env:       append(proxy.GetConfig().GetEnvVars(), commonEnvs...),
+			VolumeMounts: []k8score.VolumeMount{
+				{
+					Name:      kfpTokenVolumeName,
+					MountPath: kfpTokenMountPath,
+					ReadOnly:  true,
+				},
+			},
+		},
+		Volumes: []k8score.Volume{
+			{
+				Name: kfpTokenVolumeName,
+				VolumeSource: k8score.VolumeSource{
+					Projected: &k8score.ProjectedVolumeSource{
+						Sources: []k8score.VolumeProjection{
+							{
+								ServiceAccountToken: &k8score.ServiceAccountTokenProjection{
+									Path:              "token",
+									Audience:          kfpTokenAudience,
+									ExpirationSeconds: kfpTokenExpirationSecondsPtr(),
+								},
+							},
+						},
+					},
+				},
+			},
 		},
 	}
 	// If TLS is enabled (apiserver or metadata), add the custom CA bundle to the container driver template.
@@ -449,6 +470,22 @@ func (c *workflowCompiler) addContainerExecutorTemplate(task *pipelinespec.Pipel
 				EmptyDir: &k8score.EmptyDirVolumeSource{},
 			},
 		},
+		{
+			Name: kfpTokenVolumeName,
+			VolumeSource: k8score.VolumeSource{
+				Projected: &k8score.ProjectedVolumeSource{
+					Sources: []k8score.VolumeProjection{
+						{
+							ServiceAccountToken: &k8score.ServiceAccountTokenProjection{
+								Path:              "token",
+								Audience:          kfpTokenAudience,
+								ExpirationSeconds: kfpTokenExpirationSecondsPtr(),
+							},
+						},
+					},
+				},
+			},
+		},
 		{
 			Name: gcsScratchName,
 			VolumeSource: k8score.VolumeSource{
@@ -515,6 +552,11 @@ func (c *workflowCompiler) addContainerExecutorTemplate(task *pipelinespec.Pipel
 			Name:      volumeNameKFPLauncher,
 			MountPath: component.VolumePathKFPLauncher,
 		},
+		{
+			Name:      kfpTokenVolumeName,
+			MountPath: kfpTokenMountPath,
+			ReadOnly:  true,
+		},
 		{
 			Name:      gcsScratchName,
 			MountPath: gcsScratchLocation,
@@ -545,7 +587,7 @@ func (c *workflowCompiler) addContainerExecutorTemplate(task *pipelinespec.Pipel
 		},
 	}
 	// If the apiserver is TLS-enabled, add the custom CA bundle to the executor.
-	if common.GetCaBundleSecretName() != "" && (c.mlPipelineTLSEnabled || common.GetMetadataTLSEnabled()) {
+	if common.GetCaBundleSecretName() != "" && c.mlPipelineTLSEnabled {
 		ConfigureCustomCABundle(executor)
 	}
diff --git a/backend/src/v2/compiler/argocompiler/dag.go b/backend/src/v2/compiler/argocompiler/dag.go
index 30c4a000834..c45951c430a 100644
--- a/backend/src/v2/compiler/argocompiler/dag.go
+++ b/backend/src/v2/compiler/argocompiler/dag.go
@@ -72,7 +72,7 @@ func (c *workflowCompiler) DAG(name string, componentSpec *pipelinespec.Componen
 	}
 	tasks, err := c.task(taskName, kfpTask, taskInputs{
-		parentDagID: inputParameter(paramParentDagID),
+		parentDagID: inputParameter(paramParentDagTaskID),
 	})
 	if err != nil {
 		return err
@@ -92,7 +92,7 @@ func (c *workflowCompiler) DAG(name string, componentSpec *pipelinespec.Componen
 		Name: name,
 		Inputs: wfapi.Inputs{
 			Parameters: []wfapi.Parameter{
-				{Name: paramParentDagID},
+				{Name: paramParentDagTaskID},
 			},
 		},
 		DAG: &wfapi.DAGTemplate{
@@ -110,7 +110,7 @@ func (c *workflowCompiler) DAG(name string, componentSpec *pipelinespec.Componen
 		Name: c.templateName(name),
 		Inputs: wfapi.Inputs{
 			Parameters: []wfapi.Parameter{
-				{Name: paramParentDagID},
+				{Name: paramParentDagTaskID},
 			},
 		},
 		DAG: &wfapi.DAGTemplate{},
@@ -126,7 +126,7 @@ func (c *workflowCompiler) DAG(name string, componentSpec *pipelinespec.Componen
 		exitTemplate := taskToExitTemplate[taskName]
 		tasks, err := c.task(
-			taskName, kfpTask, taskInputs{parentDagID: inputParameter(paramParentDagID), exitTemplate: exitTemplate},
+			taskName, kfpTask, taskInputs{parentDagID: inputParameter(paramParentDagTaskID), exitTemplate: exitTemplate},
 		)
 		if err != nil {
 			return err
@@ -162,7 +162,7 @@ func (c *workflowCompiler) DAG(name string, componentSpec *pipelinespec.Componen
 		return err
 	}
 	dag := c.dagTask("root", name, dagInputs{
-		parentDagID: driverOutputs.executionID,
+		parentTaskID: driverOutputs.taskID,
 	})
 	dag.Depends = depends([]string{driverTaskName})
 	entrypoint := &wfapi.Template{
@@ -180,8 +180,8 @@
 }
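To trace the renamed parameter end to end: the DAG driver writes the freshly created task's ID to /tmp/outputs/task-id, Argo lifts that file into the parent-dag-task-id-path output parameter, and child templates receive it as their parent-dag-task-id input. A self-contained sketch of the placeholder strings involved, assuming the compiler's inputValue/taskOutputParameter helpers expand to the usual Argo template expressions:

    // Sketch: the placeholder chain behind the rename; helper bodies are
    // assumptions modeled on standard Argo parameter syntax.
    package main

    import "fmt"

    func taskOutputParameter(task, param string) string {
    	return fmt.Sprintf("{{tasks.%s.outputs.parameters.%s}}", task, param)
    }

    func inputParameter(param string) string {
    	return fmt.Sprintf("{{inputs.parameters.%s}}", param)
    }

    func main() {
    	// What the driver task exposes (backed by /tmp/outputs/task-id):
    	fmt.Println(taskOutputParameter("some-dag-driver", "parent-dag-task-id-path"))
    	// What the child DAG template consumes:
    	fmt.Println(inputParameter("parent-dag-task-id"))
    }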
 
 type dagInputs struct {
-	// placeholder for parent DAG execution ID
-	parentDagID string
+	// placeholder for parent DAG task ID
+	parentTaskID string
 	// if provided along with exitTemplate, this will be provided as the parent-dag-id input to the Argo Workflow exit
 	// lifecycle hook.
 	hookParentDagID string
@@ -198,7 +198,7 @@ func (c *workflowCompiler) dagTask(name string, componentName string, inputs dag
 		Name:     name,
 		Template: c.templateName(componentName),
 		Arguments: wfapi.Arguments{Parameters: []wfapi.Parameter{
-			{Name: paramParentDagID, Value: wfapi.AnyStringPtr(inputs.parentDagID)},
+			{Name: paramParentDagTaskID, Value: wfapi.AnyStringPtr(inputs.parentTaskID)},
 			{Name: paramCondition, Value: wfapi.AnyStringPtr(inputs.condition)},
 		}},
 	}
@@ -215,7 +215,7 @@ type taskInputs struct {
 	exitTemplate string
 }
 
-// parentDagID: placeholder for parent DAG execution ID
+// parentTaskID: placeholder for parent DAG task ID
 func (c *workflowCompiler) task(name string, task *pipelinespec.PipelineTaskSpec, inputs taskInputs) (tasks []wfapi.DAGTask, err error) {
 	defer func() {
 		if err != nil {
@@ -245,7 +245,7 @@ func (c *workflowCompiler) task(name string, task *pipelinespec.PipelineTaskSpec
 	case *pipelinespec.ComponentSpec_Dag:
 		driverTaskName := name + "-driver"
 		driver, driverOutputs, err := c.dagDriverTask(driverTaskName, dagDriverInputs{
-			parentDagID:    inputs.parentDagID,
+			parentTaskID:   inputs.parentDagID,
 			component:      componentSpecPlaceholder,
 			task:           taskSpecJson,
 			iterationIndex: inputs.iterationIndex,
@@ -260,7 +260,7 @@ func (c *workflowCompiler) task(name string, task *pipelinespec.PipelineTaskSpec
 			driver.Depends = depends(task.GetDependentTasks())
 		}
 		dag := c.dagTask(name, componentName, dagInputs{
-			parentDagID:     driverOutputs.executionID,
+			parentTaskID:    driverOutputs.taskID,
 			exitTemplate:    inputs.exitTemplate,
 			hookParentDagID: inputs.parentDagID,
 			condition:       driverOutputs.condition,
@@ -326,7 +326,7 @@ func (c *workflowCompiler) task(name string, task *pipelinespec.PipelineTaskSpec
 		// it's impossible to add a when condition based on driver outputs.
return nil, fmt.Errorf("triggerPolicy.condition on importer task is not supported") } - importer, err := c.importerTask(name, task, taskSpecJson, inputs.parentDagID) + importer, err := c.importerTask(name, task, name, inputs.parentDagID) if err != nil { return nil, err } @@ -351,14 +351,14 @@ func (c *workflowCompiler) iteratorTask(name string, task *pipelinespec.Pipeline }() componentName := task.GetComponentRef().GetName() // Set up Loop Control Template - iteratorTasks, err := c.iterationItemTask("iteration", task, taskJson, parentDagID) + iteratorTasks, err := c.iterationItemTask("iteration", task, taskJson, parentDagID, name) if err != nil { return nil, err } loopTmpl := &wfapi.Template{ Inputs: wfapi.Inputs{ Parameters: []wfapi.Parameter{ - {Name: paramParentDagID}, + {Name: paramParentDagTaskID}, }, }, DAG: &wfapi.DAGTemplate{ @@ -383,7 +383,7 @@ func (c *workflowCompiler) iteratorTask(name string, task *pipelinespec.Pipeline Arguments: wfapi.Arguments{ Parameters: []wfapi.Parameter{ { - Name: paramParentDagID, + Name: paramParentDagTaskID, Value: wfapi.AnyStringPtr(parentDagID), }, }, @@ -393,7 +393,7 @@ func (c *workflowCompiler) iteratorTask(name string, task *pipelinespec.Pipeline return tasks, nil } -func (c *workflowCompiler) iterationItemTask(name string, task *pipelinespec.PipelineTaskSpec, taskJson string, parentDagID string) (tasks []wfapi.DAGTask, err error) { +func (c *workflowCompiler) iterationItemTask(name string, task *pipelinespec.PipelineTaskSpec, taskJSON string, parentDagID string, taskName string) (tasks []wfapi.DAGTask, err error) { defer func() { if err != nil { err = fmt.Errorf("iterationItem task: %w", err) @@ -408,9 +408,10 @@ func (c *workflowCompiler) iterationItemTask(name string, task *pipelinespec.Pip // Set up Iteration (Single Task) Template driverArgoName := name + "-driver" driverInputs := dagDriverInputs{ - component: componentSpecPlaceholder, - parentDagID: parentDagID, - task: taskJson, // TODO(Bobgy): avoid duplicating task JSON twice in the template. + component: componentSpecPlaceholder, + parentTaskID: parentDagID, + task: taskJSON, // TODO(Bobgy): avoid duplicating task JSON twice in the template. 
+		component:    componentSpecPlaceholder,
+		parentTaskID: parentDagID,
+		task:         taskJSON, // TODO(Bobgy): avoid duplicating task JSON twice in the template.
+		taskName:     taskName, // Pass the task key for proper input resolution
 	}
 	driver, driverOutputs, err := c.dagDriverTask(driverArgoName, driverInputs)
 	if err != nil {
@@ -418,32 +419,23 @@ func (c *workflowCompiler) iterationItemTask(name string, task *pipelinespec.Pip
 	}
 	iterationCount := intstr.FromString(driverOutputs.iterationCount)
-	iterationTasks, err := c.task(
-		"iteration-item",
-		task,
-		taskInputs{
-			parentDagID:    inputParameter(paramParentDagID),
-			iterationIndex: inputParameter(paramIterationIndex),
-		},
-	)
-	if err != nil {
-		return nil, err
-	}
-	iterationsTmpl := &wfapi.Template{
-		Inputs: wfapi.Inputs{
-			Parameters: []wfapi.Parameter{
-				{Name: paramParentDagID},
-				{Name: paramIterationIndex},
+
+	// for each driver add the iterationIndex
+	c.templates[componentName].Inputs.Parameters = append(
+		c.templates[componentName].Inputs.Parameters,
+		wfapi.Parameter{Name: paramIterationIndex})
+
+	for i := range c.templates[componentName].DAG.Tasks {
+		c.templates[componentName].DAG.Tasks[i].Arguments.Parameters = append(
+			c.templates[componentName].DAG.Tasks[i].Arguments.Parameters, wfapi.Parameter{
+				Name:  paramIterationIndex,
+				Value: wfapi.AnyStringPtr(inputParameter(paramIterationIndex)),
 			},
-		},
-		DAG: &wfapi.DAGTemplate{
-			Tasks: iterationTasks,
-		},
-	}
-	iterationsTmplName, err := c.addTemplate(iterationsTmpl, componentName+"-"+name)
-	if err != nil {
-		return nil, err
+		)
+		// Sync with wf.Spec since we are updating this template post-hoc, after generation
+		c.syncTemplate(componentName)
 	}
+
 	when := ""
 	if task.GetTriggerPolicy().GetCondition() != "" {
 		when = driverOutputs.condition + " != false"
@@ -453,13 +445,13 @@ func (c *workflowCompiler) iterationItemTask(name string, task *pipelinespec.Pip
 		*driver,
 		{
 			Name:     name + "-iterations",
-			Template: iterationsTmplName,
+			Template: componentName,
 			Depends:  depends([]string{driverArgoName}),
 			When:     when,
 			Arguments: wfapi.Arguments{
 				Parameters: []wfapi.Parameter{{
-					Name:  paramParentDagID,
-					Value: wfapi.AnyStringPtr(driverOutputs.executionID),
+					Name:  paramParentDagTaskID,
+					Value: wfapi.AnyStringPtr(driverOutputs.taskID),
 				}, {
 					Name:  paramIterationIndex,
 					Value: wfapi.AnyStringPtr(loopItem()),
@@ -472,13 +464,13 @@ func (c *workflowCompiler) iterationItemTask(name string, task *pipelinespec.Pip
 }
 
 type dagDriverOutputs struct {
-	executionID    string
+	taskID         string
 	iterationCount string // only returned for iterator DAG drivers
 	condition      string // if false, the DAG is skipped
 }
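The post-hoc template mutation above is why syncTemplate (added near the end of dag.go in this diff) exists: if the compiler keeps one copy of each template in a lookup map and another in the serialized workflow spec, a late append to the map entry never reaches the spec unless it is copied back by name. A toy, runnable illustration of that aliasing pitfall, with hypothetical names:

    // Toy illustration: a map of template copies does not alias the slice
    // that is ultimately serialized, so edits must be synced back by name.
    package main

    import "fmt"

    type Template struct {
    	Name   string
    	Params []string
    }

    func main() {
    	spec := []Template{{Name: "comp-loop"}}
    	index := map[string]*Template{"comp-loop": {Name: "comp-loop"}}

    	// Mutating the map entry does not touch the serialized slice...
    	index["comp-loop"].Params = append(index["comp-loop"].Params, "iteration-index")

    	// ...until it is explicitly copied back, mirroring syncTemplate.
    	for i := range spec {
    		if spec[i].Name == "comp-loop" {
    			spec[i] = *index["comp-loop"]
    		}
    	}
    	fmt.Println(spec[0].Params) // [iteration-index]
    }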
 type dagDriverInputs struct {
-	parentDagID    string // parent DAG execution ID. optional, the root DAG does not have parent
+	parentTaskID   string // parent DAG Task ID. optional, the root DAG does not have parent
 	component      string // input placeholder for component spec
 	task           string // optional, the root DAG does not have task spec.
 	taskName       string // optional, the name of the task, used for input resolving
@@ -500,10 +492,10 @@ func (c *workflowCompiler) dagDriverTask(name string, inputs dagDriverInputs) (*
 			Value: wfapi.AnyStringPtr(inputs.iterationIndex),
 		})
 	}
-	if inputs.parentDagID != "" {
+	if inputs.parentTaskID != "" {
 		params = append(params, wfapi.Parameter{
-			Name:  paramParentDagID,
-			Value: wfapi.AnyStringPtr(inputs.parentDagID),
+			Name:  paramParentDagTaskID,
+			Value: wfapi.AnyStringPtr(inputs.parentTaskID),
 		})
 	}
 	if inputs.runtimeConfig != nil {
@@ -526,7 +518,7 @@ func (c *workflowCompiler) dagDriverTask(name string, inputs dagDriverInputs) (*
 		})
 	}
 
-	if inputs.taskName != "" && inputs.taskName != "iteration-item" {
+	if inputs.taskName != "" {
 		params = append(params, wfapi.Parameter{
 			Name:  paramTaskName,
 			Value: wfapi.AnyStringPtr(inputs.taskName),
@@ -540,7 +532,7 @@ func (c *workflowCompiler) dagDriverTask(name string, inputs dagDriverInputs) (*
 		},
 	}
 	return t, &dagDriverOutputs{
-		executionID:    taskOutputParameter(name, paramExecutionID),
+		taskID:         taskOutputParameter(name, paramParentDagTaskIDPath),
 		iterationCount: taskOutputParameter(name, paramIterationCount),
 		condition:      taskOutputParameter(name, paramCondition),
 	}, nil
@@ -559,13 +551,13 @@ func (c *workflowCompiler) addDAGDriverTemplate() string {
 		"--run_id", runID(),
 		"--run_name", runResourceName(),
 		"--run_display_name", c.job.DisplayName,
-		"--dag_execution_id", inputValue(paramParentDagID),
+		"--parent_task_id", inputValue(paramParentDagTaskID),
 		"--component", inputValue(paramComponent),
 		"--task", inputValue(paramTask),
 		"--task_name", inputValue(paramTaskName),
 		"--runtime_config", inputValue(paramRuntimeConfig),
 		"--iteration_index", inputValue(paramIterationIndex),
-		"--execution_id_path", outputPath(paramExecutionID),
+		"--parent_task_id_path", outputPath(paramParentDagTaskIDPath),
 		"--iteration_count_path", outputPath(paramIterationCount),
 		"--condition_path", outputPath(paramCondition),
 		"--http_proxy", proxy.GetConfig().GetHttpProxy(),
@@ -578,12 +570,9 @@ func (c *workflowCompiler) addDAGDriverTemplate() string {
 	if c.mlPipelineTLSEnabled {
 		args = append(args, "--ml_pipeline_tls_enabled")
 	}
-	if common.GetMetadataTLSEnabled() {
-		args = append(args, "--metadata_tls_enabled")
-	}
 	setCABundle := false
-	if common.GetCaBundleSecretName() != "" && (c.mlPipelineTLSEnabled || common.GetMetadataTLSEnabled()) {
+	if common.GetCaBundleSecretName() != "" && c.mlPipelineTLSEnabled {
 		args = append(args, "--ca_cert_path", common.TLSCertCAPath)
 		setCABundle = true
 	}
@@ -603,14 +592,14 @@ func (c *workflowCompiler) addDAGDriverTemplate() string {
 				{Name: paramRuntimeConfig, Default: wfapi.AnyStringPtr("")},
 				{Name: paramTask, Default: wfapi.AnyStringPtr("")},
 				{Name: paramTaskName, Default: wfapi.AnyStringPtr("")},
-				{Name: paramParentDagID, Default: wfapi.AnyStringPtr("0")},
+				{Name: paramParentDagTaskID, Default: wfapi.AnyStringPtr("")},
 				{Name: paramIterationIndex, Default: wfapi.AnyStringPtr("-1")},
 				{Name: paramDriverType, Default: wfapi.AnyStringPtr("DAG")},
 			},
 		},
 		Outputs: wfapi.Outputs{
 			Parameters: []wfapi.Parameter{
-				{Name: paramExecutionID, ValueFrom: &wfapi.ValueFrom{Path: "/tmp/outputs/execution-id"}},
+				{Name: paramParentDagTaskIDPath, ValueFrom: &wfapi.ValueFrom{Path: "/tmp/outputs/task-id"}},
 				{Name: paramIterationCount, ValueFrom: &wfapi.ValueFrom{Path: "/tmp/outputs/iteration-count", Default: wfapi.AnyStringPtr("0")}},
 				{Name: paramCondition, ValueFrom: &wfapi.ValueFrom{Path: "/tmp/outputs/condition", Default: wfapi.AnyStringPtr("true")}},
 			},
@@ -620,7 +609,32 @@ func (c *workflowCompiler) addDAGDriverTemplate() string {
 			Command:   c.driverCommand,
 			Args:      args,
 			Resources: driverResources,
-			Env:       proxy.GetConfig().GetEnvVars(),
+			Env:       append(proxy.GetConfig().GetEnvVars(), commonEnvs...),
+			VolumeMounts: []k8score.VolumeMount{
+				{
+					Name:      kfpTokenVolumeName,
+					MountPath: kfpTokenMountPath,
+					ReadOnly:  true,
+				},
+			},
+		},
+		Volumes: []k8score.Volume{
+			{
+				Name: kfpTokenVolumeName,
+				VolumeSource: k8score.VolumeSource{
+					Projected: &k8score.ProjectedVolumeSource{
+						Sources: []k8score.VolumeProjection{
+							{
+								ServiceAccountToken: &k8score.ServiceAccountTokenProjection{
+									Path:              "token",
+									Audience:          kfpTokenAudience,
+									ExpirationSeconds: kfpTokenExpirationSecondsPtr(),
+								},
+							},
+						},
+					},
+				},
+			},
 		},
 	}
 	// If TLS is enabled (apiserver or metadata), add the custom CA bundle to the DAG driver template.
@@ -700,3 +714,18 @@ func depends(deps []string) string {
 	}
 	return builder.String()
 }
+
+func (c *workflowCompiler) syncTemplate(name string) {
+	t, ok := c.templates[name]
+	if !ok || t == nil {
+		return
+	}
+	for i := range c.wf.Spec.Templates {
+		if c.wf.Spec.Templates[i].Name == name {
+			c.wf.Spec.Templates[i] = *t
+			return
+		}
+	}
+	// Not found: append it.
+	c.wf.Spec.Templates = append(c.wf.Spec.Templates, *t)
+}
diff --git a/backend/src/v2/compiler/argocompiler/importer.go b/backend/src/v2/compiler/argocompiler/importer.go
index ff0dd7d6c8f..7893738d0b2 100644
--- a/backend/src/v2/compiler/argocompiler/importer.go
+++ b/backend/src/v2/compiler/argocompiler/importer.go
@@ -33,11 +33,7 @@ func (c *workflowCompiler) Importer(name string, componentSpec *pipelinespec.Com
 	return c.saveComponentImpl(name, importer)
 }
 
-func (c *workflowCompiler) importerTask(name string, task *pipelinespec.PipelineTaskSpec, taskJSON string, parentDagID string) (*wfapi.DAGTask, error) {
-	componentPlaceholder, err := c.useComponentSpec(task.GetComponentRef().GetName())
-	if err != nil {
-		return nil, err
-	}
+func (c *workflowCompiler) importerTask(name string, task *pipelinespec.PipelineTaskSpec, taskName string, parentDagID string) (*wfapi.DAGTask, error) {
 	importerPlaceholder, err := c.useComponentImpl(task.GetComponentRef().GetName())
 	if err != nil {
 		return nil, err
@@ -45,19 +41,18 @@ func (c *workflowCompiler) importerTask(name string, task *pipelinespec.Pipeline
 	return &wfapi.DAGTask{
 		Name:     name,
 		Template: c.addImporterTemplate(),
-		Arguments: wfapi.Arguments{Parameters: []wfapi.Parameter{{
-			Name:  paramTask,
-			Value: wfapi.AnyStringPtr(taskJSON),
-		}, {
-			Name:  paramComponent,
-			Value: wfapi.AnyStringPtr(componentPlaceholder),
-		}, {
-			Name:  paramImporter,
-			Value: wfapi.AnyStringPtr(importerPlaceholder),
-		}, {
-			Name:  paramParentDagID,
-			Value: wfapi.AnyStringPtr(parentDagID),
-		}}},
+		Arguments: wfapi.Arguments{Parameters: []wfapi.Parameter{
+			{
+				Name:  paramTaskName,
+				Value: wfapi.AnyStringPtr(taskName),
+			},
+			{
+				Name:  paramImporter,
+				Value: wfapi.AnyStringPtr(importerPlaceholder),
+			}, {
+				Name:  paramParentDagTaskID,
+				Value: wfapi.AnyStringPtr(parentDagID),
+			}}},
 	}, nil
 }
 
@@ -68,20 +63,15 @@ func (c *workflowCompiler) addImporterTemplate() string {
 	}
 	args := []string{
 		"--executor_type", "importer",
-		"--task_spec", inputValue(paramTask),
-		"--component_spec", inputValue(paramComponent),
+		"--task_name", inputValue(paramTaskName),
 		"--importer_spec", inputValue(paramImporter),
 		"--pipeline_name", c.spec.PipelineInfo.GetName(),
 		"--run_id", runID(),
-		"--parent_dag_id", inputValue(paramParentDagID),
+		"--parent_task_id", inputValue(paramParentDagTaskID),
 		"--pod_name",
 		fmt.Sprintf("$(%s)", component.EnvPodName),
 		"--pod_uid",
 		fmt.Sprintf("$(%s)", component.EnvPodUID),
-		"--mlmd_server_address",
-		fmt.Sprintf("$(%s)", component.EnvMetadataHost),
-		"--mlmd_server_port",
-		fmt.Sprintf("$(%s)", component.EnvMetadataPort),
 	}
 	if c.cacheDisabled {
 		args = append(args, "--cache_disabled")
@@ -89,12 +79,9 @@ func (c *workflowCompiler) addImporterTemplate() string {
 	if c.mlPipelineTLSEnabled {
 		args = append(args, "--ml_pipeline_tls_enabled")
 	}
-	if common.GetMetadataTLSEnabled() {
-		args = append(args, "--metadata_tls_enabled")
-	}
 	setCABundle := false
-	if common.GetCaBundleSecretName() != "" && (c.mlPipelineTLSEnabled || common.GetMetadataTLSEnabled()) {
+	if common.GetCaBundleSecretName() != "" && c.mlPipelineTLSEnabled {
 		args = append(args, "--ca_cert_path", common.TLSCertCAPath)
 		setCABundle = true
 	}
@@ -109,10 +96,9 @@ func (c *workflowCompiler) addImporterTemplate() string {
 		Name: name,
 		Inputs: wfapi.Inputs{
 			Parameters: []wfapi.Parameter{
-				{Name: paramTask},
-				{Name: paramComponent},
+				{Name: paramTaskName},
 				{Name: paramImporter},
-				{Name: paramParentDagID},
+				{Name: paramParentDagTaskID},
 			},
 		},
 		Container: &k8score.Container{
@@ -122,6 +108,31 @@ func (c *workflowCompiler) addImporterTemplate() string {
 			EnvFrom:   []k8score.EnvFromSource{metadataEnvFrom},
 			Env:       commonEnvs,
 			Resources: driverResources,
+			VolumeMounts: []k8score.VolumeMount{
+				{
+					Name:      kfpTokenVolumeName,
+					MountPath: kfpTokenMountPath,
+					ReadOnly:  true,
+				},
+			},
+		},
+		Volumes: []k8score.Volume{
+			{
+				Name: kfpTokenVolumeName,
+				VolumeSource: k8score.VolumeSource{
+					Projected: &k8score.ProjectedVolumeSource{
+						Sources: []k8score.VolumeProjection{
+							{
+								ServiceAccountToken: &k8score.ServiceAccountTokenProjection{
+									Path:              "token",
+									Audience:          kfpTokenAudience,
+									ExpirationSeconds: kfpTokenExpirationSecondsPtr(),
+								},
+							},
+						},
+					},
+				},
+			},
 		},
 	}
diff --git a/backend/src/v2/component/batch_updater.go b/backend/src/v2/component/batch_updater.go
new file mode 100644
index 00000000000..88f031501f0
--- /dev/null
+++ b/backend/src/v2/component/batch_updater.go
@@ -0,0 +1,252 @@
+// Copyright 2021-2023 The Kubeflow Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//	http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package component provides component launcher functionality for KFP v2.
+package component
+
+import (
+	"context"
+	"fmt"
+
+	"github.com/golang/glog"
+	apiV2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client"
+	"github.com/kubeflow/pipelines/backend/src/v2/apiclient/kfpapi"
+)
+
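In intended use, a launcher queues everything it learns during execution and flushes once near the end. A hedged usage sketch of the type defined below; the ctx, task, and kfpAPI values are assumed to come from the surrounding launcher code:

    // Sketch: typical BatchUpdater lifecycle inside a launcher.
    func flushExample(ctx context.Context, task *apiV2beta1.PipelineTaskDetail, kfpAPI kfpapi.API) error {
    	updater := NewBatchUpdater()

    	// Queue as execution progresses; repeated updates to the same
    	// TaskId are merged rather than sent twice.
    	updater.QueueTaskUpdate(task)

    	// One Flush issues at most three bulk calls, in dependency order:
    	// artifacts first, then task updates, then artifact-task links.
    	return updater.Flush(ctx, kfpAPI)
    }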
+// BatchUpdater collects API updates during execution and flushes them
+// in batches to reduce database round-trips.
+type BatchUpdater struct {
+	// Map of task ID to the latest task update
+	// Using a map automatically deduplicates multiple updates to the same task
+	taskUpdates map[string]*apiV2beta1.PipelineTaskDetail
+
+	// Artifact-task relationships to create
+	// We can use the existing bulk API for these
+	artifactTasks []*apiV2beta1.ArtifactTask
+
+	// Artifacts to create
+	// These need to be created before artifact-tasks that reference them
+	artifacts []*createArtifactRequest
+
+	// Metrics for tracking improvement
+	queuedTaskUpdates       int
+	queuedArtifactTasks     int
+	queuedArtifacts         int
+	dedupedTaskUpdates      int
+	actualTaskUpdateCalls   int
+	actualArtifactCalls     int
+	actualArtifactTaskCalls int
+}
+
+// createArtifactRequest stores the full context needed to create an artifact
+type createArtifactRequest struct {
+	request *apiV2beta1.CreateArtifactRequest
+}
+
+// NewBatchUpdater creates a new BatchUpdater
+func NewBatchUpdater() *BatchUpdater {
+	return &BatchUpdater{
+		taskUpdates:   make(map[string]*apiV2beta1.PipelineTaskDetail),
+		artifactTasks: make([]*apiV2beta1.ArtifactTask, 0),
+		artifacts:     make([]*createArtifactRequest, 0),
+	}
+}
+
+// QueueTaskUpdate queues a task update. If the same task is updated multiple times,
+// the updates are merged (parameters and artifacts are accumulated, status is taken from latest).
+func (b *BatchUpdater) QueueTaskUpdate(task *apiV2beta1.PipelineTaskDetail) {
+	if task == nil || task.TaskId == "" {
+		glog.Warning("Attempted to queue nil task or task with empty ID")
+		return
+	}
+
+	// Check if we already have an update for this task
+	if existingTask, exists := b.taskUpdates[task.TaskId]; exists {
+		b.dedupedTaskUpdates++
+		glog.V(2).Infof("Merging task update for task %s", task.TaskId)
+
+		// Merge the updates:
+		// 1. Accumulate output parameters (append new ones)
+		if task.Outputs != nil && len(task.Outputs.Parameters) > 0 {
+			if existingTask.Outputs == nil {
+				existingTask.Outputs = &apiV2beta1.PipelineTaskDetail_InputOutputs{}
+			}
+			existingTask.Outputs.Parameters = append(existingTask.Outputs.Parameters, task.Outputs.Parameters...)
+		}
+
+		// 2. Accumulate output artifacts (append new ones)
+		if task.Outputs != nil && len(task.Outputs.Artifacts) > 0 {
+			if existingTask.Outputs == nil {
+				existingTask.Outputs = &apiV2beta1.PipelineTaskDetail_InputOutputs{}
+			}
+			existingTask.Outputs.Artifacts = append(existingTask.Outputs.Artifacts, task.Outputs.Artifacts...)
+		}
+
+		// 3. Accumulate input parameters (append new ones)
+		if task.Inputs != nil && len(task.Inputs.Parameters) > 0 {
+			if existingTask.Inputs == nil {
+				existingTask.Inputs = &apiV2beta1.PipelineTaskDetail_InputOutputs{}
+			}
+			existingTask.Inputs.Parameters = append(existingTask.Inputs.Parameters, task.Inputs.Parameters...)
+		}
+
+		// 4. Accumulate input artifacts (append new ones)
+		if task.Inputs != nil && len(task.Inputs.Artifacts) > 0 {
+			if existingTask.Inputs == nil {
+				existingTask.Inputs = &apiV2beta1.PipelineTaskDetail_InputOutputs{}
+			}
+			existingTask.Inputs.Artifacts = append(existingTask.Inputs.Artifacts, task.Inputs.Artifacts...)
+		}
+
+		// 5. Take the latest status and timestamps
+		if task.State != apiV2beta1.PipelineTaskDetail_RUNTIME_STATE_UNSPECIFIED {
+			existingTask.State = task.State
+		}
+		if task.EndTime != nil {
+			existingTask.EndTime = task.EndTime
+		}
+		if task.StartTime != nil {
+			existingTask.StartTime = task.StartTime
+		}
+	} else {
+		// First update for this task
+		b.taskUpdates[task.TaskId] = task
+	}
+
+	b.queuedTaskUpdates++
+}
+
+// QueueArtifactTask queues an artifact-task relationship to create
+func (b *BatchUpdater) QueueArtifactTask(artifactTask *apiV2beta1.ArtifactTask) {
+	if artifactTask == nil {
+		glog.Warning("Attempted to queue nil artifact task")
+		return
+	}
+
+	b.artifactTasks = append(b.artifactTasks, artifactTask)
+	b.queuedArtifactTasks++
+}
+
+// QueueArtifact queues an artifact to create
+func (b *BatchUpdater) QueueArtifact(request *apiV2beta1.CreateArtifactRequest) {
+	if request == nil {
+		glog.Warning("Attempted to queue nil artifact")
+		return
+	}
+
+	b.artifacts = append(b.artifacts, &createArtifactRequest{request: request})
+	b.queuedArtifacts++
+}
+
+// Flush executes all queued updates in batches
+// Order of operations:
+// 1. Create artifacts (they need to exist before artifact-tasks can reference them)
+// 2. Update tasks (task updates can happen in parallel with artifact creation)
+// 3. Create artifact-tasks (these depend on artifacts existing)
+func (b *BatchUpdater) Flush(ctx context.Context, client kfpapi.API) error {
+	if len(b.taskUpdates) == 0 && len(b.artifactTasks) == 0 && len(b.artifacts) == 0 {
+		glog.V(2).Info("BatchUpdater: No updates to flush")
+		return nil
+	}
+
+	glog.Infof("BatchUpdater: Flushing %d task updates (deduped from %d), %d artifacts, %d artifact-tasks",
+		len(b.taskUpdates), b.queuedTaskUpdates, len(b.artifacts), len(b.artifactTasks))
+
+	// Print details about what we're flushing
+	for taskID, task := range b.taskUpdates {
+		glog.V(1).Infof("  Task update: %s, status=%v, outputs: %d params, %d artifacts",
+			taskID, task.State,
+			len(task.GetOutputs().GetParameters()),
+			len(task.GetOutputs().GetArtifacts()))
+	}
+	for i, artifact := range b.artifacts {
+		glog.V(1).Infof("  Artifact #%d: name=%s, taskID=%s, key=%s",
+			i, artifact.request.Artifact.Name, artifact.request.TaskId, artifact.request.ProducerKey)
+	}
+	for i, at := range b.artifactTasks {
+		glog.V(1).Infof("  ArtifactTask #%d: artifactID=%s, taskID=%s, key=%s, type=%v",
+			i, at.ArtifactId, at.TaskId, at.Key, at.Type)
+	}
+
+	// Step 1: Create artifacts using bulk API
+	if len(b.artifacts) > 0 {
+		bulkReq := &apiV2beta1.CreateArtifactsBulkRequest{
+			Artifacts: make([]*apiV2beta1.CreateArtifactRequest, 0, len(b.artifacts)),
+		}
+		for _, artifactReq := range b.artifacts {
+			bulkReq.Artifacts = append(bulkReq.Artifacts, artifactReq.request)
+		}
+		_, err := client.CreateArtifactsBulk(ctx, bulkReq)
+		if err != nil {
+			return fmt.Errorf("failed to create artifacts in bulk: %w", err)
+		}
+		b.actualArtifactCalls = 1 // Bulk call counts as 1
+	}
+
+	// Step 2: Update tasks using bulk API
+	if len(b.taskUpdates) > 0 {
+		_, err := client.UpdateTasksBulk(ctx, &apiV2beta1.UpdateTasksBulkRequest{
+			Tasks: b.taskUpdates,
+		})
+		if err != nil {
+			return fmt.Errorf("failed to update tasks in bulk: %w", err)
+		}
+		b.actualTaskUpdateCalls = 1 // Bulk call counts as 1
+	}
+
+	// Step 3: Create artifact-tasks using existing bulk API
+	if len(b.artifactTasks) > 0 {
+		_, err := client.CreateArtifactTasks(ctx, &apiV2beta1.CreateArtifactTasksBulkRequest{
+			ArtifactTasks: b.artifactTasks,
+		})
+		if err != nil {
+			return fmt.Errorf("failed to create artifact-tasks in bulk: %w", err)
+		}
+		b.actualArtifactTaskCalls = 1 // Bulk call counts as 1
+	}
+
+	// Log metrics
+	glog.Infof("BatchUpdater metrics - Queued: %d task updates, %d artifacts, %d artifact-tasks | "+
+		"Deduped: %d task updates | Actual API calls: %d task updates, %d artifacts, %d artifact-task calls",
+		b.queuedTaskUpdates, b.queuedArtifacts, b.queuedArtifactTasks,
+		b.dedupedTaskUpdates,
+		b.actualTaskUpdateCalls, b.actualArtifactCalls, b.actualArtifactTaskCalls)
+
+	// Reset for next batch
+	b.reset()
+
+	return nil
+}
+
+// reset clears all queued updates (called after flush)
+func (b *BatchUpdater) reset() {
+	b.taskUpdates = make(map[string]*apiV2beta1.PipelineTaskDetail)
+	b.artifactTasks = make([]*apiV2beta1.ArtifactTask, 0)
+	b.artifacts = make([]*createArtifactRequest, 0)
+
+	// Keep metrics across resets for the lifetime of the BatchUpdater
+}
+
+// GetMetrics returns the current metrics
+func (b *BatchUpdater) GetMetrics() map[string]int {
+	return map[string]int{
+		"queued_task_updates":        b.queuedTaskUpdates,
+		"queued_artifacts":           b.queuedArtifacts,
+		"queued_artifact_tasks":      b.queuedArtifactTasks,
+		"deduped_task_updates":       b.dedupedTaskUpdates,
+		"actual_task_update_calls":   b.actualTaskUpdateCalls,
+		"actual_artifact_calls":      b.actualArtifactCalls,
+		"actual_artifact_task_calls": b.actualArtifactTaskCalls,
+	}
+}
diff --git a/backend/src/v2/component/constants.go b/backend/src/v2/component/constants.go
index f4020263553..313377b828c 100644
--- a/backend/src/v2/component/constants.go
+++ b/backend/src/v2/component/constants.go
@@ -22,10 +22,6 @@ const (
 	EnvPodName = "KFP_POD_NAME"
 	EnvPodUID  = "KFP_POD_UID"
 
-	// Env vars in metadata-grpc-configmap
-	EnvMetadataHost = "METADATA_GRPC_SERVICE_HOST"
-	EnvMetadataPort = "METADATA_GRPC_SERVICE_PORT"
-
 	// Workspace-related constants
 	WorkspaceVolumeName = "kfp-workspace"
 	WorkspaceMountPath  = "/kfp-workspace"
diff --git a/backend/src/v2/component/importer_launcher.go b/backend/src/v2/component/importer_launcher.go
index fa9a99fd0fb..98d081ad48b 100644
--- a/backend/src/v2/component/importer_launcher.go
+++ b/backend/src/v2/component/importer_launcher.go
@@ -2,298 +2,325 @@ package component
 
 import (
 	"context"
-	"encoding/json"
 	"fmt"
+	"net/url"
+	"path"
 	"strings"
 
-	"github.com/kubeflow/pipelines/backend/src/common/util"
-
-	"github.com/kubeflow/pipelines/backend/src/v2/objectstore"
-
-	pb "github.com/kubeflow/pipelines/third_party/ml-metadata/go/ml_metadata"
+	"github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec"
+	apiV2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client"
+	"github.com/kubeflow/pipelines/backend/src/v2/client_manager"
+	"google.golang.org/protobuf/types/known/timestamppb"
 
 	"github.com/golang/glog"
-	"github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec"
-	"github.com/kubeflow/pipelines/backend/src/v2/metadata"
-	"google.golang.org/protobuf/encoding/protojson"
-	"k8s.io/client-go/kubernetes"
-	"k8s.io/client-go/rest"
 )
 
-type ImporterLauncherOptions struct {
-	// required, pipeline context name
-	PipelineName string
-	// required, KFP run ID
-	RunID string
-	// required, parent DAG execution ID
-	ParentDagID int64
-}
-
-func (o *ImporterLauncherOptions) validate() error {
-	if o == nil {
-		return fmt.Errorf("empty importer launcher options")
-	}
-	if o.PipelineName == "" {
-		return fmt.Errorf("importer launcher options: pipeline name is empty")
-	}
-	if o.RunID == "" {
-		return fmt.Errorf("importer launcher options: Run ID is empty")
-	}
-	if o.ParentDagID == 0 {
-		return fmt.Errorf("importer launcher options: Parent DAG ID is not provided")
-	}
-	return nil
-}
-
 type ImportLauncher struct {
-	component               *pipelinespec.ComponentSpec
-	importer                *pipelinespec.PipelineDeploymentConfig_ImporterSpec
-	task                    *pipelinespec.PipelineTaskSpec
-	launcherV2Options       LauncherV2Options
-	importerLauncherOptions ImporterLauncherOptions
-
-	// clients
-	metadataClient *metadata.Client
-	k8sClient      *kubernetes.Clientset
+	opts          LauncherV2Options
+	clientManager client_manager.ClientManagerInterface
 }
 
-func NewImporterLauncher(ctx context.Context, componentSpecJSON, importerSpecJSON, taskSpecJSON string, launcherV2Opts *LauncherV2Options, importerLauncherOpts *ImporterLauncherOptions) (l *ImportLauncher, err error) {
+func NewImporterLauncher(
+	launcherV2Opts *LauncherV2Options,
+	clientManager client_manager.ClientManagerInterface,
+) (l *ImportLauncher, err error) {
 	defer func() {
 		if err != nil {
 			err = fmt.Errorf("failed to create importer launcher: %w", err)
 		}
 	}()
-	component := &pipelinespec.ComponentSpec{}
-	err = protojson.Unmarshal([]byte(componentSpecJSON), component)
-	if err != nil {
-		return nil, fmt.Errorf("failed to unmarshal component spec: %w", err)
-	}
-	importer := &pipelinespec.PipelineDeploymentConfig_ImporterSpec{}
-	err = protojson.Unmarshal([]byte(importerSpecJSON), importer)
-	if err != nil {
-		return nil, fmt.Errorf("failed to unmarshal importer spec: %w", err)
-	}
-	task := &pipelinespec.PipelineTaskSpec{}
-	err = protojson.Unmarshal([]byte(taskSpecJSON), task)
-	if err != nil {
-		return nil, fmt.Errorf("failed to unmarshal task spec: %w", err)
-	}
 	err = launcherV2Opts.validate()
 	if err != nil {
 		return nil, err
 	}
-	err = importerLauncherOpts.validate()
-	if err != nil {
-		return nil, err
-	}
-	restConfig, err := rest.InClusterConfig()
-	if err != nil {
-		return nil, fmt.Errorf("failed to initialize kubernetes client: %w", err)
-	}
-	k8sClient, err := kubernetes.NewForConfig(restConfig)
-	if err != nil {
-		return nil, fmt.Errorf("failed to initialize kubernetes client set: %w", err)
-	}
-	tlsCfg, err := util.GetTLSConfig(launcherV2Opts.CaCertPath)
-	if err != nil {
-		return nil, err
-	}
-	metadataClient, err := metadata.NewClient(launcherV2Opts.MLMDServerAddress, launcherV2Opts.MLMDServerPort, tlsCfg)
-	if err != nil {
-		return nil, err
-	}
 	return &ImportLauncher{
-		component:               component,
-		importer:                importer,
-		task:                    task,
-		launcherV2Options:       *launcherV2Opts,
-		importerLauncherOptions: *importerLauncherOpts,
-		metadataClient:          metadataClient,
-		k8sClient:               k8sClient,
+		opts:          *launcherV2Opts,
+		clientManager: clientManager,
 	}, nil
 }
 
-func (l *ImportLauncher) Execute(ctx context.Context) (err error) {
+func (l *ImportLauncher) Execute(ctx context.Context) (executionErr error) {
 	defer func() {
-		if err != nil {
-			err = fmt.Errorf("failed to execute importer component: %w", err)
+		if executionErr != nil {
+			executionErr = fmt.Errorf("failed to execute importer component: %w", executionErr)
 		}
 	}()
-	// TODO(Bobgy): there's no need to pass any parameters, because pipeline
-	// and pipeline run context have been created by root DAG driver.
-	pipeline, err := l.metadataClient.GetPipeline(ctx, l.importerLauncherOptions.PipelineName, l.importerLauncherOptions.RunID, "", "", "", "")
-	if err != nil {
-		return err
+	kfpAPI := l.clientManager.KFPAPIClient()
+
+	// Create the task; we will continue to update it as needed.
+	parentTaskID := l.opts.ParentTask.GetTaskId()
+	createdTask, executionErr := kfpAPI.CreateTask(ctx, &apiV2beta1.CreateTaskRequest{
+		Task: &apiV2beta1.PipelineTaskDetail{
+			Name:         l.opts.TaskSpec.GetTaskInfo().GetName(),
+			DisplayName:  l.opts.TaskSpec.GetTaskInfo().GetName(),
+			RunId:        l.opts.Run.RunId,
+			ParentTaskId: &parentTaskID,
+			Type:         apiV2beta1.PipelineTaskDetail_IMPORTER,
+			State:        apiV2beta1.PipelineTaskDetail_RUNNING,
+			ScopePath:    l.opts.ScopePath.StringPath(),
+			StartTime:    timestamppb.Now(),
+			CreateTime:   timestamppb.Now(),
+			Pods: []*apiV2beta1.PipelineTaskDetail_TaskPod{
+				{
+					Name: l.opts.PodName,
+					Uid:  l.opts.PodUID,
+					Type: apiV2beta1.PipelineTaskDetail_EXECUTOR,
+				},
+			},
+		},
+	})
+	if executionErr != nil {
+		return executionErr
 	}
-	ecfg := &metadata.ExecutionConfig{
-		TaskName:      l.task.GetTaskInfo().GetName(),
-		PodName:       l.launcherV2Options.PodName,
-		PodUID:        l.launcherV2Options.PodUID,
-		Namespace:     l.launcherV2Options.Namespace,
-		ExecutionType: metadata.ImporterExecutionTypeName,
-		ParentDagID:   l.importerLauncherOptions.ParentDagID,
+
+	// The defer statement is used to ensure we propagate any errors
+	// encountered in this task execution.
+	defer func() {
+		if executionErr != nil {
+			createdTask.State = apiV2beta1.PipelineTaskDetail_FAILED
+		} else {
+			createdTask.State = apiV2beta1.PipelineTaskDetail_SUCCEEDED
+		}
+		createdTask.EndTime = timestamppb.Now()
+		_, updateErr := kfpAPI.UpdateTask(ctx, &apiV2beta1.UpdateTaskRequest{
+			TaskId: createdTask.TaskId,
+			Task:   createdTask,
+		})
+		if updateErr != nil {
+			glog.Errorf("failed to update task: %v", updateErr)
+			return
+		}
+		// Propagate any statuses up the DAG.
+		updateStatusErr := l.clientManager.KFPAPIClient().UpdateStatuses(ctx, l.opts.Run, l.opts.PipelineSpec, l.opts.Task)
+		if updateStatusErr != nil {
+			glog.Errorf("failed to update statuses: %v", updateStatusErr)
+			return
+		}
+	}()
+
+	if createdTask == nil {
+		return fmt.Errorf("failed to create task for importer execution")
 	}
-	createdExecution, err := l.metadataClient.CreateExecution(ctx, pipeline, ecfg)
-	if err != nil {
-		return err
+	l.opts.Task = createdTask
+
+	if createdTask.Outputs == nil {
+		createdTask.Outputs = &apiV2beta1.PipelineTaskDetail_InputOutputs{
+			Artifacts: make([]*apiV2beta1.PipelineTaskDetail_InputOutputs_IOArtifact, 0),
+		}
+	} else if createdTask.Outputs.Artifacts == nil {
+		createdTask.Outputs.Artifacts = make([]*apiV2beta1.PipelineTaskDetail_InputOutputs_IOArtifact, 0)
 	}
-	artifact, err := l.findOrNewArtifactToImport(ctx, createdExecution)
-	if err != nil {
-		return err
+
+	// Handle artifact creation and links to Importer Task
+	artifactToImport, executionErr := l.ImportSpecToArtifact()
+	if executionErr != nil {
+		return executionErr
 	}
-	outputArtifactName, err := l.getOutPutArtifactName()
-	if err != nil {
-		return err
+
+	// Determine if the Artifact already exists.
+	preExistingArtifact, executionErr := l.findMatchedArtifact(ctx, artifactToImport)
+	if executionErr != nil {
+		return executionErr
 	}
-	outputArtifact := &metadata.OutputArtifact{
-		Name:     outputArtifactName,
-		Artifact: artifact,
-		Schema:   l.component.OutputDefinitions.Artifacts[outputArtifactName].GetArtifactType().GetInstanceSchema(),
+
+	// Get the output artifact name from the component spec.
+	artifactOutputKey, executionErr := l.getArtifactOutputKey()
+	if executionErr != nil {
+		return executionErr
 	}
-	outputArtifacts := []*metadata.OutputArtifact{outputArtifact}
-	if err := l.metadataClient.PublishExecution(ctx, createdExecution, nil, outputArtifacts, pb.Execution_COMPLETE); err != nil {
-		return fmt.Errorf("failed to publish results of importer execution to ML Metadata: %w", err)
+
+	// If reimport is true or the artifact does not already exist, we create a new artifact
+	if l.opts.ImporterSpec.Reimport || preExistingArtifact == nil {
+		glog.Infof("Creating new artifact for importer task %s", l.opts.TaskSpec.GetTaskInfo().GetName())
+		_, executionErr = kfpAPI.CreateArtifact(ctx, &apiV2beta1.CreateArtifactRequest{
+			Artifact:    artifactToImport,
+			RunId:       l.opts.Run.RunId,
+			TaskId:      createdTask.TaskId,
+			ProducerKey: artifactOutputKey,
+			Type:        apiV2beta1.IOType_OUTPUT,
+		})
+		if executionErr != nil {
+			return executionErr
+		}
+	} else {
+		glog.Infof("Reusing existing artifact %s for importer task %s", preExistingArtifact.GetArtifactId(), l.opts.TaskSpec.GetTaskInfo().GetName())
+		// When reusing an existing artifact we just need to create a new link to this Importer task via
+		// an ArtifactTask entry.
+		_, executionErr = kfpAPI.CreateArtifactTask(ctx, &apiV2beta1.CreateArtifactTaskRequest{
+			ArtifactTask: &apiV2beta1.ArtifactTask{
+				ArtifactId: preExistingArtifact.GetArtifactId(),
+				TaskId:     createdTask.TaskId,
+				RunId:      l.opts.Run.RunId,
+				Key:        artifactOutputKey,
+				Type:       apiV2beta1.IOType_OUTPUT,
+				Producer: &apiV2beta1.IOProducer{
+					TaskName: l.opts.TaskSpec.GetTaskInfo().GetName(),
+				},
+			},
+		})
+		if executionErr != nil {
+			return executionErr
+		}
 	}
 	return nil
 }
 
-func (l *ImportLauncher) findOrNewArtifactToImport(ctx context.Context, execution *metadata.Execution) (artifact *pb.Artifact, err error) {
-	// TODO consider moving logic to package metadata so that *pb.Artifact won't get exposed outside of package metadata
-	artifactToImport, err := l.ImportSpecToMLMDArtifact(ctx)
+func (l *ImportLauncher) findMatchedArtifact(ctx context.Context, artifactToMatch *apiV2beta1.Artifact) (matchedArtifact *apiV2beta1.Artifact, err error) {
+	artifacts, err := l.clientManager.KFPAPIClient().ListArtifactsByURI(ctx, artifactToMatch.GetUri(), l.opts.Namespace)
 	if err != nil {
 		return nil, err
 	}
-	if l.importer.Reimport {
-		return artifactToImport, nil
+	for _, artifact := range artifacts {
+		if artifact.GetUri() == artifactToMatch.GetUri() {
+			return artifact, nil
+		}
 	}
-	matchedArtifact, err := l.metadataClient.FindMatchedArtifact(ctx, artifactToImport, execution.GetPipeline().GetCtxID())
-	if err != nil {
-		return nil, err
+	for _, candidateArtifact := range artifacts {
+		if artifactsAreEqual(artifactToMatch, candidateArtifact) {
+			return candidateArtifact, nil
+		}
+	}
+	// No match found
+	return nil, nil
+}
+
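Matching is deliberately two-pass: an exact URI hit wins, and only then is a full field-by-field comparison attempted against the remaining candidates. A small in-package sketch of how the equality helper defined next behaves; the values are illustrative:

    // Sketch: artifactsAreEqual treats any differing field (type, URI,
    // name, description, or metadata) as a mismatch.
    func equalityExample() {
    	uri := "s3://bucket/models/model.bin" // illustrative URI
    	a := &apiV2beta1.Artifact{Name: "model", Uri: &uri}
    	b := &apiV2beta1.Artifact{Name: "model", Uri: &uri}
    	fmt.Println(artifactsAreEqual(a, b)) // true: all compared fields match

    	b.Name = "model-v2"
    	fmt.Println(artifactsAreEqual(a, b)) // false: names differ
    }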
+ return false + } + } + return true } -func (l *ImportLauncher) ImportSpecToMLMDArtifact(ctx context.Context) (artifact *pb.Artifact, err error) { +func (l *ImportLauncher) ImportSpecToArtifact() (artifact *apiV2beta1.Artifact, err error) { defer func() { if err != nil { - err = fmt.Errorf("failed to create MLMD artifact from ImporterSpec: %w", err) + err = fmt.Errorf("failed to create Artifact from ImporterSpec: %w", err) } }() - schema, err := getArtifactSchema(l.importer.TypeSchema) - if err != nil { - return nil, fmt.Errorf("failed to get schema from importer spec: %w", err) - } - artifactTypeId, err := l.metadataClient.GetOrInsertArtifactType(ctx, schema) + importerSpec := l.opts.ImporterSpec + artifactType, err := inferArtifactType(importerSpec.GetTypeSchema()) if err != nil { - return nil, fmt.Errorf("failed to get or insert artifact type with schema %s: %w", schema, err) + return nil, fmt.Errorf("failed to get schemaType from importer spec: %w", err) } - // Resolve artifact URI. Can be one of two sources: // 1) Constant // 2) Runtime Parameter + // TODO(Humair): The logic here is very similar to how InputParameters are resolved in the driver's resolver package. + // We should consolidate this logic. var artifactUri string - if l.importer.GetArtifactUri().GetConstant() != nil { - glog.Infof("Artifact URI as constant: %+v", l.importer.GetArtifactUri().GetConstant()) - artifactUri = l.importer.GetArtifactUri().GetConstant().GetStringValue() + switch { + case importerSpec.GetArtifactUri().GetConstant() != nil: + glog.Infof("Artifact URI as constant: %+v", importerSpec.GetArtifactUri().GetConstant()) + artifactUri = importerSpec.GetArtifactUri().GetConstant().GetStringValue() if artifactUri == "" { return nil, fmt.Errorf("empty Artifact URI constant value") } - } else if l.importer.GetArtifactUri().GetRuntimeParameter() != "" { - // When URI is provided using Runtime Parameter, need to retrieve it from dag execution in MLMD - paramName := l.importer.GetArtifactUri().GetRuntimeParameter() - taskInput, ok := l.task.GetInputs().GetParameters()[paramName] + case importerSpec.GetArtifactUri().GetRuntimeParameter() != "": + paramName := importerSpec.GetArtifactUri().GetRuntimeParameter() + taskInput, ok := l.opts.TaskSpec.GetInputs().GetParameters()[paramName] if !ok { return nil, fmt.Errorf("cannot find parameter %s in task input to fetch artifact uri", paramName) } componentInput := taskInput.GetComponentInputParameter() - dag, err := l.metadataClient.GetDAG(ctx, l.importerLauncherOptions.ParentDagID) - if err != nil { - return nil, fmt.Errorf("error retrieving dag execution for parameter %s: %w", paramName, err) - } - glog.Infof("parent DAG: %+v", dag.Execution) - inputParams, _, err := dag.Execution.GetParameters() - if err != nil { - return nil, fmt.Errorf("error retrieving input parameters from dag execution for parameter %s: %w", paramName, err) + var ioParam *apiV2beta1.PipelineTaskDetail_InputOutputs_IOParameter + for _, inputParam := range l.opts.ParentTask.GetInputs().GetParameters() { + if inputParam.ParameterKey == componentInput { + ioParam = inputParam + break + } } - v, ok := inputParams[componentInput] - if !ok { - return nil, fmt.Errorf("error resolving artifact URI: parent DAG does not have input parameter %s", componentInput) + if ioParam == nil { + return nil, fmt.Errorf("cannot find parameter %s in parent task input to fetch artifact uri", componentInput) } - artifactUri = v.GetStringValue() - glog.Infof("Artifact URI from runtime parameter: %s", artifactUri) + 
artifactUri = ioParam.GetValue().GetStringValue()
 		if artifactUri == "" {
 			return nil, fmt.Errorf("empty artifact URI runtime value for parameter %s", paramName)
 		}
-	} else {
+	default:
 		return nil, fmt.Errorf("artifact uri not provided")
 	}
-	state := pb.Artifact_LIVE
-
-	artifact = &pb.Artifact{
-		TypeId:           &artifactTypeId,
-		State:            &state,
-		Uri:              &artifactUri,
-		Properties:       make(map[string]*pb.Value),
-		CustomProperties: make(map[string]*pb.Value),
+	// TODO(HumairAK): Allow user to specify a canonical artifact Name & Description when importing.
+	// For now we infer the name from the URI object name.
+	artifactName, err := inferArtifactName(artifactUri)
+	if err != nil {
+		return nil, fmt.Errorf("failed to extract filename from artifact uri: %w", err)
 	}
-	if l.importer.Metadata != nil {
-		for k, v := range l.importer.Metadata.Fields {
-			value, err := metadata.StructValueToMLMDValue(v)
-			if err != nil {
-				return nil, fmt.Errorf("failed to convert structValue : %w", err)
-			}
-			artifact.CustomProperties[k] = value
-		}
+	artifact = &apiV2beta1.Artifact{
+		Name:        artifactName,
+		Description: "",
+		Type:        artifactType,
+		Uri:         &artifactUri,
+		CreatedAt:   timestamppb.Now(),
+		Namespace:   l.opts.Namespace,
+	}
+	if importerSpec.Metadata != nil {
+		artifact.Metadata = importerSpec.Metadata.GetFields()
 	}
-
 	if strings.HasPrefix(artifactUri, "oci://") {
-		artifactType, err := metadata.SchemaToArtifactType(schema)
-		if err != nil {
-			return nil, fmt.Errorf("converting schema to artifact type failed: %w", err)
-		}
-
-		if *artifactType.Name != "system.Model" {
-			return nil, fmt.Errorf("the %s artifact type does not support OCI registries", *artifactType.Name)
+		if artifactType != apiV2beta1.Artifact_Model {
+			return nil, fmt.Errorf("the %s artifact type does not support OCI registries", artifactType)
 		}
-		return artifact, nil
 	}
+	return artifact, nil
+}

-	provider, err := objectstore.ParseProviderFromPath(artifactUri)
-	if err != nil {
-		return nil, fmt.Errorf("no provider scheme found in artifact URI: %s", artifactUri)
+func (l *ImportLauncher) getArtifactOutputKey() (string, error) {
+	outputNames := make([]string, 0, len(l.opts.ComponentSpec.GetOutputDefinitions().GetArtifacts()))
+	for name := range l.opts.ComponentSpec.GetOutputDefinitions().GetArtifacts() {
+		outputNames = append(outputNames, name)
 	}
-
-	// Assume all imported artifacts will rely on execution environment for store provider session info
-	storeSessionInfo := objectstore.SessionInfo{
-		Provider: provider,
-		Params: map[string]string{
-			"fromEnv": "true",
-		},
+	if len(outputNames) != 1 {
+		return "", fmt.Errorf("expected exactly one output artifact in componentOutputSpec, found %d", len(outputNames))
 	}
-	storeSessionInfoJSON, err := json.Marshal(storeSessionInfo)
+	return outputNames[0], nil
+}
+
+func inferArtifactType(typeSchema *pipelinespec.ArtifactTypeSchema) (apiV2beta1.Artifact_ArtifactType, error) {
+	schemaType, err := getArtifactSchemaType(typeSchema)
 	if err != nil {
-		return nil, err
+		return apiV2beta1.Artifact_TYPE_UNSPECIFIED, fmt.Errorf("failed to get schemaType from importer spec: %w", err)
 	}
-	storeSessionInfoStr := string(storeSessionInfoJSON)
-	artifact.CustomProperties["store_session_info"] = metadata.StringValue(storeSessionInfoStr)
-	return artifact, nil
+	return artifactTypeSchemaToArtifactType(schemaType)
 }

-func (l *ImportLauncher) getOutPutArtifactName() (string, error) {
-	outPutNames := make([]string, 0, len(l.component.GetOutputDefinitions().GetArtifacts()))
-	for name := range l.component.GetOutputDefinitions().GetArtifacts() {
-		outPutNames = append(outPutNames, name)
+func inferArtifactName(uri string) (string, error) {
+	parsed, err := url.Parse(uri)
+	if err != nil {
+		return "", fmt.Errorf("invalid URI: %w", err)
 	}
-	if len(outPutNames) != 1 {
-		return "", fmt.Errorf("failed to extract output artifact name from componentOutputSpec")
+	// For URIs with a scheme and host, e.g. "s3://bucket/path/to/file.txt"
+	// or "https://minio.local/bucket/path/to/file.txt"
+	if parsed.Scheme != "" && parsed.Host != "" {
+		return path.Base(parsed.Path), nil
 	}
-	return outPutNames[0], nil
-
+	// For URIs with a scheme but no host, e.g. "file:///path/to/file.txt"
+	if parsed.Scheme != "" && parsed.Host == "" {
+		return path.Base(parsed.Path), nil
+	}
+	// For URIs without a scheme, e.g. "bucket/path/to/file.txt"
+	cleaned := strings.TrimSuffix(uri, "/")
+	return path.Base(cleaned), nil
 }
diff --git a/backend/src/v2/component/interfaces.go b/backend/src/v2/component/interfaces.go
new file mode 100644
index 00000000000..80ca007a3e4
--- /dev/null
+++ b/backend/src/v2/component/interfaces.go
@@ -0,0 +1,166 @@
+// Copyright 2025 The Kubeflow Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//	http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package component
+
+import (
+	"context"
+	"fmt"
+	"io"
+	"io/fs"
+	"os"
+	"os/exec"
+
+	"github.com/kubeflow/pipelines/backend/src/v2/config"
+	"github.com/kubeflow/pipelines/backend/src/v2/objectstore"
+	"gocloud.dev/blob"
+)
+
+// FileSystem provides an interface for file system operations.
+// This abstraction allows for easy mocking in tests.
+type FileSystem interface {
+	// MkdirAll creates a directory path and all parents if needed
+	MkdirAll(path string, perm os.FileMode) error
+
+	// Create creates or truncates the named file
+	Create(name string) (*os.File, error)
+
+	// ReadFile reads the entire file
+	ReadFile(name string) ([]byte, error)
+
+	// WriteFile writes data to a file
+	WriteFile(name string, data []byte, perm os.FileMode) error
+
+	// Stat returns file info
+	Stat(name string) (fs.FileInfo, error)
+}
+
+// CommandExecutor provides an interface for executing system commands.
+// This abstraction allows for easy mocking in tests.
+type CommandExecutor interface {
+	// Run executes a command with the given arguments and I/O streams
+	Run(ctx context.Context, cmd string, args []string, stdin io.Reader, stdout, stderr io.Writer) error
+}
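These seams exist so unit tests can avoid touching the real filesystem or spawning child processes. As a rough illustration of how a test might exercise the CommandExecutor seam (the fakeExecutor type and the test itself are illustrative, not part of this change):

```go
package component_test

import (
	"bytes"
	"context"
	"io"
	"testing"
)

// fakeExecutor records the command it was asked to run instead of executing it.
type fakeExecutor struct {
	gotCmd  string
	gotArgs []string
}

func (f *fakeExecutor) Run(ctx context.Context, cmd string, args []string, stdin io.Reader, stdout, stderr io.Writer) error {
	f.gotCmd, f.gotArgs = cmd, args
	_, err := io.WriteString(stdout, "fake executor output\n")
	return err
}

func TestFakeExecutorRecordsInvocation(t *testing.T) {
	var out bytes.Buffer
	f := &fakeExecutor{}
	if err := f.Run(context.Background(), "python3", []string{"-c", "pass"}, nil, &out, &out); err != nil {
		t.Fatal(err)
	}
	if f.gotCmd != "python3" || len(f.gotArgs) != 2 || out.Len() == 0 {
		t.Fatalf("unexpected invocation: %q %v", f.gotCmd, f.gotArgs)
	}
}
```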
+// ObjectStoreClientInterface provides an interface for object store operations.
+// This abstraction allows for easy mocking in tests.
+type ObjectStoreClientInterface interface {
+	// UploadArtifact uploads an artifact from local path to remote URI
+	UploadArtifact(ctx context.Context, localPath, remoteURI, artifactKey string) error
+
+	// DownloadArtifact downloads an artifact from remote URI to local path
+	DownloadArtifact(ctx context.Context, remoteURI, localPath, artifactKey string) error
+}
+
+// OSFileSystem is the production implementation of FileSystem using real os calls
+type OSFileSystem struct{}
+
+func (f *OSFileSystem) MkdirAll(path string, perm os.FileMode) error {
+	return os.MkdirAll(path, perm)
+}
+
+func (f *OSFileSystem) Create(name string) (*os.File, error) {
+	return os.Create(name)
+}
+
+func (f *OSFileSystem) ReadFile(name string) ([]byte, error) {
+	return os.ReadFile(name)
+}
+
+func (f *OSFileSystem) WriteFile(name string, data []byte, perm os.FileMode) error {
+	return os.WriteFile(name, data, perm)
+}
+
+func (f *OSFileSystem) Stat(name string) (fs.FileInfo, error) {
+	return os.Stat(name)
+}
+
+// RealCommandExecutor is the production implementation of CommandExecutor
type RealCommandExecutor struct{}
+
+func (e *RealCommandExecutor) Run(ctx context.Context, cmd string, args []string, stdin io.Reader, stdout, stderr io.Writer) error {
+	// Honor the caller's context so the child process is terminated on cancellation.
+	command := exec.CommandContext(ctx, cmd, args...)
+	command.Stdin = stdin
+	command.Stdout = stdout
+	command.Stderr = stderr
+	return command.Run()
+}
+
+// ObjectStoreClient is the production implementation using the actual objectstore package
+type ObjectStoreClient struct {
+	launcher *LauncherV2
+}
+
+func NewObjectStoreClient(launcher *LauncherV2) *ObjectStoreClient {
+	return &ObjectStoreClient{launcher: launcher}
+}
+
+func (c *ObjectStoreClient) UploadArtifact(ctx context.Context, localPath, remoteURI, artifactKey string) error {
+	openedBucket, blobKey, err := c.getBucket(ctx, artifactKey, remoteURI, c.launcher.launcherConfig)
+	if err != nil {
+		return fmt.Errorf("failed to get opened bucket for output artifact %q: %w", artifactKey, err)
+	}
+	uploadErr := objectstore.UploadBlob(ctx, openedBucket, localPath, blobKey)
+	if uploadErr != nil {
+		return fmt.Errorf("failed to upload output artifact %q: %w", artifactKey, uploadErr)
+	}
+	return nil
+}
+
+func (c *ObjectStoreClient) DownloadArtifact(ctx context.Context, remoteURI, localPath, artifactKey string) error {
+	openedBucket, blobKey, err := c.getBucket(ctx, artifactKey, remoteURI, c.launcher.launcherConfig)
+	if err != nil {
+		return fmt.Errorf("failed to get opened bucket for input artifact %q: %w", artifactKey, err)
+	}
+	if err = objectstore.DownloadBlob(ctx, openedBucket, localPath, blobKey); err != nil {
+		return fmt.Errorf("failed to download input artifact %q from remote storage URI %q: %w", artifactKey, remoteURI, err)
+	}
+	return nil
+}
+
+func (c *ObjectStoreClient) getBucket(
+	ctx context.Context,
+	artifactKey,
+	artifactURI string,
+	launcherConfig *config.Config,
+) (*blob.Bucket, string, error) {
+	prefix, base, err := objectstore.SplitObjectURI(artifactURI)
+	if err != nil {
+		return nil, "", fmt.Errorf("failed to get base URI path for input artifact %q: %w", artifactKey, err)
+	}
+	bucketConfig, err := objectstore.ParseBucketPathToConfig(prefix)
+	if err != nil {
+		return nil, "", fmt.Errorf("failed to parse bucket config for input artifact %q: %w", artifactKey, err)
+	}
+
+	key := bucketConfig.Hash()
+	var openedBucket *blob.Bucket
+	if cachedBucket, exists := c.launcher.openedBucketCache[key]; exists {
+		openedBucket = cachedBucket
+	} else {
+		// Create new opened bucket and store in 
cache + storeSessionInfo, err := launcherConfig.GetStoreSessionInfo(bucketConfig.PrefixedBucket()) + if err != nil { + return nil, "", fmt.Errorf("failed to get store session info for bucket %q: %w", bucketConfig.PrefixedBucket(), err) + } + newOpenBucket, err := objectstore.OpenBucket(ctx, c.launcher.clientManager.K8sClient(), c.launcher.options.Namespace, bucketConfig, &storeSessionInfo) + if err != nil { + return nil, "", fmt.Errorf("failed to open bucket %q: %w", bucketConfig.PrefixedBucket(), err) + } + c.launcher.openedBucketCache[bucketConfig.Hash()] = newOpenBucket + openedBucket = newOpenBucket + } + + return openedBucket, base, nil +} diff --git a/backend/src/v2/component/launcher_v2.go b/backend/src/v2/component/launcher_v2.go index 8bf1f4dafcc..8bab34c3589 100644 --- a/backend/src/v2/component/launcher_v2.go +++ b/backend/src/v2/component/launcher_v2.go @@ -11,77 +11,80 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. + package component import ( "bytes" "context" "encoding/json" - "errors" "fmt" "io" "os" - "os/exec" "path/filepath" "strconv" "strings" "time" - "google.golang.org/protobuf/types/known/timestamppb" - "github.com/golang/glog" - api "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client" - "github.com/kubeflow/pipelines/backend/src/v2/client_manager" - "google.golang.org/protobuf/proto" - "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" - "github.com/kubeflow/pipelines/backend/src/v2/metadata" - "github.com/kubeflow/pipelines/backend/src/v2/objectstore" - pb "github.com/kubeflow/pipelines/third_party/ml-metadata/go/ml_metadata" + apiV2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" + "github.com/kubeflow/pipelines/backend/src/common/util" + "github.com/kubeflow/pipelines/backend/src/v2/client_manager" + "github.com/kubeflow/pipelines/backend/src/v2/config" "gocloud.dev/blob" "google.golang.org/protobuf/encoding/protojson" "google.golang.org/protobuf/types/known/structpb" - "k8s.io/client-go/kubernetes" + "google.golang.org/protobuf/types/known/timestamppb" ) type LauncherV2Options struct { - Namespace, - PodName, - PodUID, - MLMDServerAddress, - MLMDServerPort, - PipelineName, - RunID string - PublishLogs string - CacheDisabled bool + Namespace string + PodName string + PodUID string + PipelineName string + PublishLogs string + CachedFingerprint string + CacheDisabled bool + IterationIndex *int64 + ComponentSpec *pipelinespec.ComponentSpec + ImporterSpec *pipelinespec.PipelineDeploymentConfig_ImporterSpec + PipelineSpec *structpb.Struct + TaskSpec *pipelinespec.PipelineTaskSpec + ScopePath util.ScopePath + Run *apiV2beta1.Run + ParentTask *apiV2beta1.PipelineTaskDetail + Task *apiV2beta1.PipelineTaskDetail // Set to true if apiserver is serving over TLS MLPipelineTLSEnabled bool - // Set to true if metadata server is serving over TLS - MLMDTLSEnabled bool - CaCertPath string + CaCertPath string } type LauncherV2 struct { - executionID int64 executorInput *pipelinespec.ExecutorInput - component *pipelinespec.ComponentSpec command string args []string options LauncherV2Options clientManager client_manager.ClientManagerInterface -} - -// Client is the struct to hold the Kubernetes Clientset -type kubernetesClient struct { - Clientset kubernetes.Interface + // Maintaining a cache of opened buckets will minimize + // the number of calls to the object store, and api server + openedBucketCache 
map[string]*blob.Bucket + launcherConfig *config.Config + pipelineSpec *structpb.Struct + + // BatchUpdater collects API updates and flushes them in batches + // to reduce database round-trips + batchUpdater *BatchUpdater + + // Dependency interfaces for testing + fileSystem FileSystem + cmdExecutor CommandExecutor + objectStore ObjectStoreClientInterface } // NewLauncherV2 is a factory function that returns an instance of LauncherV2. func NewLauncherV2( - ctx context.Context, - executionID int64, - executorInputJSON, - componentSpecJSON string, + executorInputJSON string, cmdArgs []string, opts *LauncherV2Options, clientManager client_manager.ClientManagerInterface, @@ -91,19 +94,12 @@ func NewLauncherV2( err = fmt.Errorf("failed to create component launcher v2: %w", err) } }() - if executionID == 0 { - return nil, fmt.Errorf("must specify execution ID") - } + executorInput := &pipelinespec.ExecutorInput{} err = protojson.Unmarshal([]byte(executorInputJSON), executorInput) if err != nil { return nil, fmt.Errorf("failed to unmarshal executor input: %w", err) } - component := &pipelinespec.ComponentSpec{} - err = protojson.Unmarshal([]byte(componentSpecJSON), component) - if err != nil { - return nil, fmt.Errorf("failed to unmarshal component spec: %w\ncomponentSpec: %v", err, prettyPrint(componentSpecJSON)) - } if len(cmdArgs) == 0 { return nil, fmt.Errorf("command and arguments are empty") } @@ -111,15 +107,42 @@ func NewLauncherV2( if err != nil { return nil, err } - return &LauncherV2{ - executionID: executionID, + + launcher := &LauncherV2{ executorInput: executorInput, - component: component, command: cmdArgs[0], args: cmdArgs[1:], options: *opts, clientManager: clientManager, - }, nil + batchUpdater: NewBatchUpdater(), + // Initialize with production implementations + fileSystem: &OSFileSystem{}, + cmdExecutor: &RealCommandExecutor{}, + openedBucketCache: make(map[string]*blob.Bucket), + pipelineSpec: opts.PipelineSpec, + } + + // Object store is initialized after launcher creation + launcher.objectStore = NewObjectStoreClient(launcher) + return launcher, nil +} + +// WithFileSystem allows overriding the file system (for testing) +func (l *LauncherV2) WithFileSystem(fs FileSystem) *LauncherV2 { + l.fileSystem = fs + return l +} + +// WithCommandExecutor allows overriding the command executor (for testing) +func (l *LauncherV2) WithCommandExecutor(executor CommandExecutor) *LauncherV2 { + l.cmdExecutor = executor + return l +} + +// WithObjectStore allows overriding the object store client (for testing) +func (l *LauncherV2) WithObjectStore(store ObjectStoreClientInterface) *LauncherV2 { + l.objectStore = store + return l } // stopWaitingArtifacts will create empty files to tell Modelcar sidecar containers to stop. Any errors encountered are @@ -160,106 +183,90 @@ func stopWaitingArtifacts(artifacts map[string]*pipelinespec.ArtifactList) { } } -// Execute calls executeV2, updates the cache, and publishes the results to MLMD. -func (l *LauncherV2) Execute(ctx context.Context) (err error) { +// Execute calls executeV2, updates the cache, and creates artifacts for outputs. 
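The WithFileSystem/WithCommandExecutor/WithObjectStore setters above form a small builder-style override pattern: production defaults are wired in the constructor and tests swap them in afterwards. A self-contained sketch of the same pattern with stand-in types (nothing here is from the PR itself):

```go
package main

import "fmt"

// Executor is an illustrative dependency seam.
type Executor interface{ Name() string }

type realExecutor struct{}

func (realExecutor) Name() string { return "RealCommandExecutor" }

type fakeExecutor struct{}

func (fakeExecutor) Name() string { return "fakeExecutor (test double)" }

// Launcher mirrors the pattern: production defaults in the constructor,
// chainable WithX overrides so tests can swap dependencies.
type Launcher struct{ exec Executor }

func NewLauncher() *Launcher { return &Launcher{exec: realExecutor{}} }

func (l *Launcher) WithExecutor(e Executor) *Launcher {
	l.exec = e
	return l
}

func main() {
	fmt.Println(NewLauncher().exec.Name())                              // production default
	fmt.Println(NewLauncher().WithExecutor(fakeExecutor{}).exec.Name()) // test override
}
```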
+func (l *LauncherV2) Execute(ctx context.Context) (executionErr error) {
 	defer func() {
-		if err != nil {
-			err = fmt.Errorf("failed to execute component: %w", err)
+		if executionErr != nil {
+			executionErr = fmt.Errorf("failed to execute component: %w", executionErr)
 		}
 	}()
-	defer stopWaitingArtifacts(l.executorInput.GetInputs().GetArtifacts())
+	l.options.Task.Pods = append(l.options.Task.Pods, &apiV2beta1.PipelineTaskDetail_TaskPod{
+		Name: l.options.PodName,
+		Uid:  l.options.PodUID,
+		Type: apiV2beta1.PipelineTaskDetail_EXECUTOR,
+	})

-	// publish execution regardless the task succeeds or not
-	var execution *metadata.Execution
-	var executorOutput *pipelinespec.ExecutorOutput
-	var outputArtifacts []*metadata.OutputArtifact
-	status := pb.Execution_FAILED
+	// Defer the final task status update to ensure we handle and propagate errors.
 	defer func() {
-		if execution == nil {
-			glog.Errorf("Skipping publish since execution is nil. Original err is: %v", err)
-			return
+		if executionErr != nil {
+			l.options.Task.State = apiV2beta1.PipelineTaskDetail_FAILED
+			l.options.Task.StatusMetadata = &apiV2beta1.PipelineTaskDetail_StatusMetadata{
+				Message: executionErr.Error(),
+			}
 		}
-
-		if perr := l.publish(ctx, execution, executorOutput, outputArtifacts, status); perr != nil {
-			if err != nil {
-				err = fmt.Errorf("failed to publish execution with error %s after execution failed: %s", perr.Error(), err.Error())
-			} else {
-				err = perr
+		l.options.Task.EndTime = timestamppb.New(time.Now())
+		// Queue the final task status update
+		l.batchUpdater.QueueTaskUpdate(l.options.Task)
+
+		// Flush all batched updates (artifacts, artifact-tasks, task updates).
+		// This executes all queued operations that were accumulated during:
+		// - uploadOutputArtifacts (artifact creation)
+		// - executeV2 (task output parameter update)
+		// - propagateOutputsUpDAG (artifact-task creation, parent task parameter updates)
+		// - this final SUCCEEDED status update
+		if flushErr := l.batchUpdater.Flush(ctx, l.clientManager.KFPAPIClient()); flushErr != nil {
+			l.options.Task.State = apiV2beta1.PipelineTaskDetail_FAILED
+			glog.Errorf("failed to flush batch updates: %v", flushErr)
+			_, updateTaskErr := l.clientManager.KFPAPIClient().UpdateTask(ctx, &apiV2beta1.UpdateTaskRequest{TaskId: l.options.Task.GetTaskId(), Task: l.options.Task})
+			if updateTaskErr != nil {
+				glog.Errorf("failed to update task status: %v", updateTaskErr)
+				// Return here: if we can't update this Task's status, there is no point in proceeding.
+				// This should never happen.
+				return
 			}
+			// Do not return on the flush error; we still want to propagate it to the upstream tasks.
 		}
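BatchUpdater's implementation is not part of this hunk; conceptually it queues writes and issues them in a single flush, which is why the deferred block above can queue the final status update and then flush everything at once. A rough sketch of that queue-and-flush shape, with all names assumed:

```go
package main

import "fmt"

// batchUpdater is a guess at the shape of the BatchUpdater used above; the
// real type is defined elsewhere in this PR, so fields here are assumptions.
type batchUpdater struct {
	queued []string // stand-in for queued tasks/artifacts/artifact-tasks
}

func (b *batchUpdater) Queue(item string) { b.queued = append(b.queued, item) }

// Flush sends everything queued so far and clears the queue, so repeated
// flushes (as in executeV2 and the deferred status update) are safe.
func (b *batchUpdater) Flush(send func(string) error) error {
	for _, item := range b.queued {
		if err := send(item); err != nil {
			return err
		}
	}
	b.queued = b.queued[:0]
	return nil
}

func main() {
	b := &batchUpdater{}
	b.Queue("artifact: model")
	b.Queue("task update: train")
	_ = b.Flush(func(s string) error { fmt.Println("sending", s); return nil })
}
```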
-		glog.Infof("publish success.")
-		// At the end of the current task, we check the statuses of all tasks in
-		// the current DAG and update the DAG's status accordingly.
-		dag, err := l.clientManager.MetadataClient().GetDAG(ctx, execution.GetExecution().CustomProperties["parent_dag_id"].GetIntValue())
-		if err != nil {
-			glog.Errorf("DAG Status Update: failed to get DAG: %s", err.Error())
+		// Refresh the run before updating statuses
+		fullView := apiV2beta1.GetRunRequest_FULL
+		refreshedRun, getRunErr := l.clientManager.KFPAPIClient().GetRun(ctx, &apiV2beta1.GetRunRequest{RunId: l.options.Run.GetRunId(), View: &fullView})
+		if getRunErr != nil {
+			glog.Errorf("failed to refresh run: %v", getRunErr)
+			return
 		}
-		pipeline, _ := l.clientManager.MetadataClient().GetPipelineFromExecution(ctx, execution.GetID())
-		err = l.clientManager.MetadataClient().UpdateDAGExecutionsState(ctx, dag, pipeline)
-		if err != nil {
-			glog.Errorf("failed to update DAG state: %s", err.Error())
+		l.options.Run = refreshedRun
+		updateStatusErr := l.clientManager.KFPAPIClient().UpdateStatuses(ctx, l.options.Run, l.pipelineSpec, l.options.Task)
+		if updateStatusErr != nil {
+			glog.Errorf("failed to update statuses: %v", updateStatusErr)
+			return
 		}
 	}()
-	executedStartedTime := time.Now().Unix()
-	execution, err = l.prePublish(ctx)
-	if err != nil {
-		return err
-	}
-	fingerPrint := execution.FingerPrint()
-	storeSessionInfo, err := objectstore.GetSessionInfoFromString(execution.GetPipeline().GetStoreSessionInfo())
-	if err != nil {
-		return err
-	}
-	pipelineRoot := execution.GetPipeline().GetPipelineRoot()
-	bucketConfig, err := objectstore.ParseBucketConfig(pipelineRoot, storeSessionInfo)
-	if err != nil {
-		return err
-	}
-	bucket, err := objectstore.OpenBucket(ctx, l.clientManager.K8sClient(), l.options.Namespace, bucketConfig)
-	if err != nil {
-		return err
-	}
-	if err = prepareOutputFolders(l.executorInput); err != nil {
-		return err
-	}
-	executorOutput, outputArtifacts, err = executeV2(
-		ctx,
-		l.executorInput,
-		l.component,
-		l.command,
-		l.args,
-		bucket,
-		bucketConfig,
-		l.clientManager.MetadataClient(),
-		l.options.Namespace,
-		l.clientManager.K8sClient(),
-		l.options.PublishLogs,
-		l.options.CaCertPath,
-	)
-	if err != nil {
-		return err
-	}
-	status = pb.Execution_COMPLETE
-	// if fingerPrint is not empty, it means this task enables cache but it does not hit cache, we need to create cache entry for this task
-	if fingerPrint != "" {
-		id := execution.GetID()
-		if id == 0 {
-			return fmt.Errorf("failed to get id from createdExecution")
-		}
-		task := &api.Task{
-			// TODO how to differentiate between shared pipeline and namespaced pipeline
-			PipelineName:    "pipeline/" + l.options.PipelineName,
-			Namespace:       l.options.Namespace,
-			RunId:           l.options.RunID,
-			MlmdExecutionID: strconv.FormatInt(id, 10),
-			CreatedAt:       timestamppb.New(time.Unix(executedStartedTime, 0)),
-			FinishedAt:      timestamppb.New(time.Unix(time.Now().Unix(), 0)),
-			Fingerprint:     fingerPrint,
+
+	defer stopWaitingArtifacts(l.executorInput.GetInputs().GetArtifacts())
+
+	// Close any open buckets in the cache
+	defer func() {
+		for _, bucket := range l.openedBucketCache {
+			_ = bucket.Close()
 		}
-		return l.clientManager.CacheClient().CreateExecutionCache(ctx, task)
+	}()
+
+	// Fetch the launcher config; object store session info is resolved from it.
+	launcherConfig, executionErr := config.FetchLauncherConfigMap(ctx, l.clientManager.K8sClient(), l.options.Namespace)
+	if executionErr != nil {
+		return fmt.Errorf("failed to get launcher configmap: %w", executionErr)
 	}
+	l.launcherConfig = launcherConfig
+	if executionErr = 
l.prepareOutputFolders(l.executorInput); executionErr != nil { + return fmt.Errorf("failed to prepare output folders: %w", executionErr) + } + _, executionErr = l.executeV2(ctx) + if executionErr != nil { + return fmt.Errorf("failed to execute component: %w", executionErr) + } + l.options.Task.State = apiV2beta1.PipelineTaskDetail_SUCCEEDED return nil } @@ -277,7 +284,6 @@ func (l *LauncherV2) Info() string { func (o *LauncherV2Options) validate() error { empty := func(s string) bool { return len(s) == 0 } err := func(s string) error { return fmt.Errorf("invalid launcher options: must specify %s", s) } - if empty(o.Namespace) { return err("Namespace") } @@ -287,138 +293,96 @@ func (o *LauncherV2Options) validate() error { if empty(o.PodUID) { return err("PodUID") } - if empty(o.MLMDServerAddress) { - return err("MLMDServerAddress") + if o.PipelineName == "" { + return err("PipelineName") } - if empty(o.MLMDServerPort) { - return err("MLMDServerPort") + if o.PipelineSpec == nil { + return err("PipelineSpec") } return nil } -// publish pod info to MLMD, before running user command -func (l *LauncherV2) prePublish(ctx context.Context) (execution *metadata.Execution, err error) { - defer func() { - if err != nil { - err = fmt.Errorf("failed to pre-publish Pod info to ML Metadata: %w", err) - } - }() - execution, err = l.clientManager.MetadataClient().GetExecution(ctx, l.executionID) +// executeV2 handles placeholder substitution for inputs, calls execute to +// execute end user logic, and uploads the resulting output Artifacts. +func (l *LauncherV2) executeV2(ctx context.Context) (*pipelinespec.ExecutorOutput, error) { + // Fill in placeholders with runtime values. + compiledCmd, compiledArgs, err := compileCmdAndArgs(l.executorInput, l.command, l.args) if err != nil { return nil, err } - ecfg := &metadata.ExecutionConfig{ - PodName: l.options.PodName, - PodUID: l.options.PodUID, - Namespace: l.options.Namespace, - } - return l.clientManager.MetadataClient().PrePublishExecution(ctx, execution, ecfg) -} -// TODO(Bobgy): consider passing output artifacts info from executor output. -func (l *LauncherV2) publish( - ctx context.Context, - execution *metadata.Execution, - executorOutput *pipelinespec.ExecutorOutput, - outputArtifacts []*metadata.OutputArtifact, - status pb.Execution_State, -) (err error) { - if execution == nil { - return fmt.Errorf("failed to publish results to ML Metadata: execution is nil") + executorOutput, err := l.execute(ctx, compiledCmd, compiledArgs) + if err != nil { + return nil, err } - var outputParameters map[string]*structpb.Value - if executorOutput != nil { - outputParameters = executorOutput.GetParameterValues() + // These are not added in execute(), because execute() is shared between v2 compatible and v2 engine launcher. + // In v2 compatible mode, we get output parameter info from runtimeInfo. In v2 engine, we get it from component spec. + // Because of the difference, we cannot put parameter collection logic in one method. + err = l.collectOutputParameters(executorOutput) + if err != nil { + return nil, err } - // TODO(Bobgy): upload output artifacts. - // TODO(Bobgy): when adding artifacts, we will need execution.pipeline to be non-nil, because we need - // to publish output artifacts to the context too. 
- // return l.metadataClient.PublishExecution(ctx, execution, outputParameters, outputArtifacts, pb.Execution_COMPLETE) - err = l.clientManager.MetadataClient().PublishExecution(ctx, execution, outputParameters, outputArtifacts, status) + // Upload artifacts from local disk to remote store. + err = l.uploadOutputArtifacts(ctx, executorOutput) if err != nil { - return fmt.Errorf("failed to publish results to ML Metadata: %w", err) + return nil, err } - return nil -} + // Update task outputs for parameters before propagation + if executorOutput != nil && len(executorOutput.GetParameterValues()) > 0 { + params := make([]*apiV2beta1.PipelineTaskDetail_InputOutputs_IOParameter, 0, len(executorOutput.GetParameterValues())) + for key, val := range executorOutput.GetParameterValues() { + param := &apiV2beta1.PipelineTaskDetail_InputOutputs_IOParameter{ + ParameterKey: key, + Type: apiV2beta1.IOType_OUTPUT, + Value: val, + Producer: &apiV2beta1.IOProducer{ + TaskName: l.options.TaskSpec.GetTaskInfo().GetName(), + }} + if l.options.IterationIndex != nil { + param.Producer.Iteration = l.options.IterationIndex + param.Type = apiV2beta1.IOType_ITERATOR_OUTPUT + } + params = append(params, param) + } -// executeV2 handles placeholder substitution for inputs, calls execute to -// execute end user logic, and uploads the resulting output Artifacts. -func executeV2( - ctx context.Context, - executorInput *pipelinespec.ExecutorInput, - component *pipelinespec.ComponentSpec, - cmd string, - args []string, - bucket *blob.Bucket, - bucketConfig *objectstore.Config, - metadataClient metadata.ClientInterface, - namespace string, - k8sClient kubernetes.Interface, - publishLogs string, - customCAPath string, -) (*pipelinespec.ExecutorOutput, []*metadata.OutputArtifact, error) { - - // Add parameter default values to executorInput, if there is not already a user input. - // This process is done in the launcher because we let the component resolve default values internally. - // Variable executorInputWithDefault is a copy so we don't alter the original data. - executorInputWithDefault, err := addDefaultParams(executorInput, component) - if err != nil { - return nil, nil, err + l.options.Task.Outputs = &apiV2beta1.PipelineTaskDetail_InputOutputs{Parameters: params} + // Queue task update instead of executing immediately + l.batchUpdater.QueueTaskUpdate(l.options.Task) } - // Fill in placeholders with runtime values. - compiledCmd, compiledArgs, err := compileCmdAndArgs(executorInputWithDefault, cmd, args) - if err != nil { - return nil, nil, err - } - - executorOutput, err := execute( - ctx, - executorInput, - compiledCmd, - compiledArgs, - bucket, - bucketConfig, - namespace, - k8sClient, - publishLogs, - customCAPath, - ) - if err != nil { - return nil, nil, err + // Flush artifacts and task parameter updates BEFORE propagation + // This ensures that when propagateOutputsUpDAG refreshes the task, + // the artifacts will exist in the API and can be propagated up the DAG + if err = l.batchUpdater.Flush(ctx, l.clientManager.KFPAPIClient()); err != nil { + return nil, fmt.Errorf("failed to flush artifacts before propagation: %w", err) } - // These are not added in execute(), because execute() is shared between v2 compatible and v2 engine launcher. - // In v2 compatible mode, we get output parameter info from runtimeInfo. In v2 engine, we get it from component spec. - // Because of the difference, we cannot put parameter collection logic in one method. 
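Each executor output value becomes one IOParameter tagged as OUTPUT, or as ITERATOR_OUTPUT with an iteration index when the task runs inside a loop. A reduced sketch of that conversion; ioParam below stands in for the real PipelineTaskDetail_InputOutputs_IOParameter:

```go
package main

import (
	"fmt"

	"google.golang.org/protobuf/types/known/structpb"
)

// ioParam models only the fields the loop above populates.
type ioParam struct {
	Key       string
	Value     *structpb.Value
	Iteration *int64
}

// toOutputParams mirrors the conversion: one parameter per executor output
// value, tagged with the iteration index when inside a loop iteration.
func toOutputParams(values map[string]*structpb.Value, iteration *int64) []ioParam {
	params := make([]ioParam, 0, len(values))
	for key, val := range values {
		params = append(params, ioParam{Key: key, Value: val, Iteration: iteration})
	}
	return params
}

func main() {
	vals := map[string]*structpb.Value{"accuracy": structpb.NewNumberValue(0.93)}
	idx := int64(2)
	for _, p := range toOutputParams(vals, &idx) {
		fmt.Printf("%s=%v (iteration %d)\n", p.Key, p.Value.AsInterface(), *p.Iteration)
	}
}
```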
- err = collectOutputParameters(executorInput, executorOutput, component) + + // Propagate outputs up the DAG hierarchy for parents that declare these outputs + err = l.propagateOutputsUpDAG(ctx) if err != nil { - return nil, nil, err + return nil, err } - // TODO(Bobgy): should we log metadata per each artifact, or batched after uploading all artifacts. - outputArtifacts, err := uploadOutputArtifacts(ctx, executorInput, executorOutput, uploadOutputArtifactsOptions{ - bucketConfig: bucketConfig, - bucket: bucket, - metadataClient: metadataClient, - }) - if err != nil { - return nil, nil, err + + // Flush propagation updates (artifact-tasks and parent task parameter updates) + // so that propagated outputs are visible to subsequent driver calls + if err = l.batchUpdater.Flush(ctx, l.clientManager.KFPAPIClient()); err != nil { + return nil, fmt.Errorf("failed to flush propagation updates: %w", err) } - // TODO(Bobgy): only return executor output. Merge info in output artifacts - // to executor output. - return executorOutput, outputArtifacts, nil + + return executorOutput, nil } // collectOutputParameters collect output parameters from local disk and add them // to executor output. -func collectOutputParameters(executorInput *pipelinespec.ExecutorInput, executorOutput *pipelinespec.ExecutorOutput, component *pipelinespec.ComponentSpec) error { +func (l *LauncherV2) collectOutputParameters(executorOutput *pipelinespec.ExecutorOutput) error { if executorOutput.ParameterValues == nil { executorOutput.ParameterValues = make(map[string]*structpb.Value) } outputParameters := executorOutput.GetParameterValues() - for name, param := range executorInput.GetOutputs().GetParameters() { + for name, param := range l.executorInput.GetOutputs().GetParameters() { _, ok := outputParameters[name] if ok { // If the output parameter was already specified in output metadata file, @@ -426,18 +390,18 @@ func collectOutputParameters(executorInput *pipelinespec.ExecutorInput, executor // the highest priority. continue } - paramSpec, ok := component.GetOutputDefinitions().GetParameters()[name] + paramSpec, ok := l.options.ComponentSpec.GetOutputDefinitions().GetParameters()[name] if !ok { return fmt.Errorf("failed to find output parameter name=%q in component spec", name) } msg := func(err error) error { return fmt.Errorf("failed to read output parameter name=%q type=%q path=%q: %w", name, paramSpec.GetParameterType(), param.GetOutputFile(), err) } - b, err := os.ReadFile(param.GetOutputFile()) + b, err := l.fileSystem.ReadFile(param.GetOutputFile()) if err != nil { return msg(err) } - value, err := metadata.TextToPbValue(string(b), paramSpec.GetParameterType()) + value, err := textToPbValue(string(b), paramSpec.GetParameterType()) if err != nil { return msg(err) } @@ -487,40 +451,49 @@ func getLogWriter(artifacts map[string]*pipelinespec.ArtifactList) (writer io.Wr return io.MultiWriter(os.Stdout, logFile) } +// ExecuteForTesting is a test-only method that executes the launcher with mocked dependencies. +// It runs the full execution flow including artifact uploads but uses the provided mock dependencies. +// This method should only be used in tests. +func (l *LauncherV2) ExecuteForTesting(ctx context.Context) (*pipelinespec.ExecutorOutput, error) { + return l.executeV2(ctx) +} + // execute downloads input artifacts, prepares the execution environment, // executes the end user code, and returns the outputs. 
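collectOutputParameters reads each declared output file and converts its text into a typed structpb.Value via textToPbValue, which is not shown in this hunk. A plausible reduced version for a few parameter kinds; the enum here is a stand-in for the real pipelinespec parameter-type enum:

```go
package main

import (
	"fmt"
	"strconv"

	"google.golang.org/protobuf/types/known/structpb"
)

// paramType is a stand-in for pipelinespec.ParameterType; the real
// textToPbValue is defined elsewhere, so this is a sketch of its behavior.
type paramType int

const (
	typeString paramType = iota
	typeNumberInteger
	typeBoolean
)

func textToPbValue(text string, t paramType) (*structpb.Value, error) {
	switch t {
	case typeString:
		return structpb.NewStringValue(text), nil
	case typeNumberInteger:
		i, err := strconv.ParseInt(text, 10, 64)
		if err != nil {
			return nil, fmt.Errorf("parsing %q as integer: %w", text, err)
		}
		return structpb.NewNumberValue(float64(i)), nil
	case typeBoolean:
		b, err := strconv.ParseBool(text)
		if err != nil {
			return nil, fmt.Errorf("parsing %q as bool: %w", text, err)
		}
		return structpb.NewBoolValue(b), nil
	default:
		return nil, fmt.Errorf("unsupported parameter type %d", t)
	}
}

func main() {
	v, _ := textToPbValue("42", typeNumberInteger)
	fmt.Println(v.GetNumberValue()) // 42
}
```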
-func execute( +func (l *LauncherV2) execute( ctx context.Context, - executorInput *pipelinespec.ExecutorInput, cmd string, args []string, - bucket *blob.Bucket, - bucketConfig *objectstore.Config, - namespace string, - k8sClient kubernetes.Interface, - publishLogs string, - customCAPath string, ) (*pipelinespec.ExecutorOutput, error) { - if err := downloadArtifacts(ctx, executorInput, bucket, bucketConfig, namespace, k8sClient); err != nil { + + // Used for local debugging. + customOutputFile := os.Getenv("KFP_OUTPUT_FILE") + if customOutputFile != "" { + return l.getExecutorOutputFile(customOutputFile) + } + + if err := l.downloadArtifacts(ctx); err != nil { return nil, err } - if err := prepareOutputFolders(executorInput); err != nil { + if err := l.prepareOutputFolders(l.executorInput); err != nil { return nil, err } var writer io.Writer - if publishLogs == "true" { - writer = getLogWriter(executorInput.Outputs.GetArtifacts()) + if l.options.PublishLogs == "true" { + writer = getLogWriter(l.executorInput.Outputs.GetArtifacts()) } else { writer = os.Stdout } + defer glog.Flush() + // If a custom CA path is input, append to system CA and save to a temp file for executor access. - if customCAPath != "" { + if l.options.CaCertPath != "" { var caBundleTmpPath string var err error - if caBundleTmpPath, err = compileTempCABundleWithCustomCA(customCAPath); err != nil { + if caBundleTmpPath, err = compileTempCABundleWithCustomCA(l.options.CaCertPath); err != nil { return nil, err } @@ -536,23 +509,14 @@ func execute( if err != nil { glog.Errorf("Error setting SSL_CERT_FILE environment variable, %s", err.Error()) } - } - // Prepare command that will execute end user code. - command := exec.Command(cmd, args...) - command.Stdin = os.Stdin - // Pipe stdout/stderr to the aforementioned multiWriter. - command.Stdout = writer - command.Stderr = writer - defer glog.Flush() - - // Execute end user code. - if err := command.Run(); err != nil { + // Execute end user code using the command executor interface. + if err := l.cmdExecutor.Run(ctx, cmd, args, os.Stdin, writer, writer); err != nil { return nil, err } - return getExecutorOutputFile(executorInput.GetOutputs().GetOutputFile()) + return l.getExecutorOutputFile(l.executorInput.GetOutputs().GetOutputFile()) } // Create a temp file that contains the system CA bundle (and custom CA if it has been mounted). @@ -605,212 +569,669 @@ func compileTempCABundleWithCustomCA(customCAPath string) (string, error) { return tmpCaBundle.Name(), nil } -type uploadOutputArtifactsOptions struct { - bucketConfig *objectstore.Config - bucket *blob.Bucket - metadataClient metadata.ClientInterface -} - -func uploadOutputArtifacts(ctx context.Context, executorInput *pipelinespec.ExecutorInput, executorOutput *pipelinespec.ExecutorOutput, opts uploadOutputArtifactsOptions) ([]*metadata.OutputArtifact, error) { - // Register artifacts with MLMD. - outputArtifacts := make([]*metadata.OutputArtifact, 0, len(executorInput.GetOutputs().GetArtifacts())) - for name, artifactList := range executorInput.GetOutputs().GetArtifacts() { - if len(artifactList.Artifacts) == 0 { - continue +// uploadOutputArtifacts iterates over all the Artifacts retrieved from the +// executor output and uploads them to the object store and registers them +// with the KFP API. 
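When PublishLogs is "true", executor stdout/stderr are tee'd to both the pod log and a log-artifact file via getLogWriter. A freestanding sketch of that tee, combining io.MultiWriter with a context-aware command; the echo command is only for demonstration:

```go
package main

import (
	"bytes"
	"context"
	"fmt"
	"io"
	"os"
	"os/exec"
)

func main() {
	var logFile bytes.Buffer // stand-in for the executor-log artifact file
	writer := io.MultiWriter(os.Stdout, &logFile)

	// Pipe the child process output to both destinations at once.
	cmd := exec.CommandContext(context.Background(), "echo", "hello from the executor")
	cmd.Stdin = os.Stdin
	cmd.Stdout = writer
	cmd.Stderr = writer
	if err := cmd.Run(); err != nil {
		panic(err)
	}
	fmt.Printf("captured %d bytes of logs\n", logFile.Len())
}
```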
+func (l *LauncherV2) uploadOutputArtifacts(
+	ctx context.Context,
+	executorOutput *pipelinespec.ExecutorOutput,
+) error {
+	// After successful execution and uploads, record outputs in the KFP API.
+	// Create artifactsMap with one entry per output port.
+	artifactsMap := map[string][]*apiV2beta1.Artifact{}
+	for artifactKey, artifactList := range l.executorInput.GetOutputs().GetArtifacts() {
+		artifactsMap[artifactKey] = []*apiV2beta1.Artifact{}
 		for _, outputArtifact := range artifactList.Artifacts {
-			glog.Infof("outputArtifact in uploadOutputArtifacts call: ", outputArtifact.Name)
-
+			glog.Infof("outputArtifact in uploadOutputArtifacts call: %s", outputArtifact.Name)
 			// Merge executor output artifact info with executor input
-			if list, ok := executorOutput.Artifacts[name]; ok && len(list.Artifacts) > 0 {
+			if list, ok := executorOutput.Artifacts[artifactKey]; ok && len(list.Artifacts) > 0 {
 				mergeRuntimeArtifacts(list.Artifacts[0], outputArtifact)
 			}
+			// OCI artifacts are accessed via shared storage of a Modelcar
+			if strings.HasPrefix(outputArtifact.Uri, "oci://") {
+				continue
+			}

-			// Upload artifacts from local path to remote storages.
-			localDir, err := retrieveArtifactPath(outputArtifact)
+			artifactType, err := inferArtifactType(outputArtifact.GetType())
 			if err != nil {
-				glog.Warningf("Output Artifact %q does not have a recognized storage URI %q. Skipping uploading to remote storage.", name, outputArtifact.Uri)
-			} else if !strings.HasPrefix(outputArtifact.Uri, "oci://") {
-				blobKey, err := opts.bucketConfig.KeyFromURI(outputArtifact.Uri)
-				if err != nil {
-					return nil, fmt.Errorf("failed to upload output artifact %q: %w", name, err)
+				return fmt.Errorf("failed to infer artifact type for port %s: %w", artifactKey, err)
+			}
+
+			// Metric artifacts don't have a URI, only a numberValue
+			if artifactType == apiV2beta1.Artifact_Metric {
+				// Each key/value pair in `metadata` equates to a new Artifact
+				for key, value := range outputArtifact.GetMetadata().GetFields() {
+					numVal, ok := value.Kind.(*structpb.Value_NumberValue)
+					if !ok {
+						return fmt.Errorf("metric value %q must be a number, got %T", key, value.Kind)
+					}
+					artifact := &apiV2beta1.Artifact{
+						Name:        key,
+						Description: "",
+						Type:        artifactType,
+						NumberValue: &numVal.NumberValue,
+						CreatedAt:   timestamppb.Now(),
+						// Continue to retain the artifact in metadata for backwards compatibility.
+						Metadata: map[string]*structpb.Value{
+							key: value,
+						},
+						Namespace: l.options.Namespace,
+					}
+					artifactsMap[artifactKey] = append(artifactsMap[artifactKey], artifact)
 				}
-				if err := objectstore.UploadBlob(ctx, opts.bucket, localDir, blobKey); err != nil {
-					// We allow components to not produce output files
-					if errors.Is(err, os.ErrNotExist) {
-						glog.Warningf("Local filepath %q does not exist", localDir)
-					} else {
-						return nil, fmt.Errorf("failed to upload output artifact %q to remote storage URI %q: %w", name, outputArtifact.Uri, err)
+			} else {
+				// We can still encounter metrics of type ClassificationMetric or SlicedClassificationMetric here;
+				// they have neither a numberValue nor a URI, and their values are stored only in metadata.
+ artifact := &apiV2beta1.Artifact{ + Name: outputArtifact.GetName(), + Description: "", + Type: artifactType, + Metadata: outputArtifact.GetMetadata().GetFields(), + CreatedAt: timestamppb.Now(), + Namespace: l.options.Namespace, + } + + // In the Classification metric case, the metric data is stored in metadata and + // not object store + isNotAMetric := apiV2beta1.Artifact_ClassificationMetric != artifactType && + apiV2beta1.Artifact_SlicedClassificationMetric != artifactType + + // If the artifact is not a metric, upload it to the object store and store the URI in the artifact + if isNotAMetric { + localPath, err := retrieveArtifactPath(outputArtifact) + if err != nil { + glog.Warningf("Output Artifact %q does not have a recognized storage URI %q. Skipping uploading to remote storage.", + artifactKey, outputArtifact.Uri) + } + err = l.objectStore.UploadArtifact(ctx, localPath, outputArtifact.Uri, artifactKey) + if err != nil { + return fmt.Errorf("failed to upload output artifact %q to remote storage URI %q: %w", artifactKey, outputArtifact.Uri, err) } + artifact.Uri = util.StringPointer(outputArtifact.Uri) } + + artifactsMap[artifactKey] = []*apiV2beta1.Artifact{artifact} } + } + } - // Write out the metadata. - metadataErr := func(err error) error { - return fmt.Errorf("unable to produce MLMD artifact for output %q: %w", name, err) + // Queue artifact creation requests (will be flushed in batch) + for artifactKey, artifacts := range artifactsMap { + for _, artifact := range artifacts { + request := &apiV2beta1.CreateArtifactRequest{ + RunId: l.options.Run.GetRunId(), + TaskId: l.options.Task.GetTaskId(), + ProducerKey: artifactKey, + Artifact: artifact, + Type: apiV2beta1.IOType_OUTPUT, } - // TODO(neuromage): Consider batching these instead of recording one by one. - schema, err := getArtifactSchema(outputArtifact.GetType()) - if err != nil { - return nil, fmt.Errorf("failed to determine schema for output %q: %w", name, err) + if l.options.IterationIndex != nil { + request.IterationIndex = l.options.IterationIndex + request.Type = apiV2beta1.IOType_ITERATOR_OUTPUT } - mlmdArtifact, err := opts.metadataClient.RecordArtifact(ctx, name, schema, outputArtifact, pb.Artifact_LIVE, opts.bucketConfig) - if err != nil { - return nil, metadataErr(err) + l.batchUpdater.QueueArtifact(request) + } + } + return nil +} + +// determineIOType determines the appropriate IOType for a propagated output based on the parent task type +// and output definition. 
+func determineIOType( + isFirstLevel bool, + currentIOType apiV2beta1.IOType, + parentTask *apiV2beta1.PipelineTaskDetail, + parentOutputKey string, + parentOutputDefs *pipelinespec.ComponentOutputsSpec, + isParameter bool, +) apiV2beta1.IOType { + // For multi-level propagation, inherit the type from the previous level + if !isFirstLevel { + return currentIOType + } + + // First level: determine type based on parent context + if parentTask.GetType() == apiV2beta1.PipelineTaskDetail_LOOP { + // For loop iterations, use ITERATOR_OUTPUT + return apiV2beta1.IOType_ITERATOR_OUTPUT + } + + // Check if this is a ONE_OF output for condition branches + if parentTask.GetType() == apiV2beta1.PipelineTaskDetail_CONDITION_BRANCH { + if isParameter { + // For parameters, check if it's in output definitions and not a list + if paramDef, exists := parentOutputDefs.GetParameters()[parentOutputKey]; exists { + // If it's not marked as a list type, it's a ONE_OF output + if paramDef.GetParameterType() != pipelinespec.ParameterType_LIST { + return apiV2beta1.IOType_ONE_OF_OUTPUT + } + } + } else { + // For artifacts, check if it's in output definitions and not a list + if artifactDef, exists := parentOutputDefs.GetArtifacts()[parentOutputKey]; exists { + if !artifactDef.GetIsArtifactList() { + return apiV2beta1.IOType_ONE_OF_OUTPUT + } } - outputArtifacts = append(outputArtifacts, mlmdArtifact) } } - return outputArtifacts, nil + + // Default to OUTPUT for regular DAG outputs + return apiV2beta1.IOType_OUTPUT } -// waitForModelcar assumes the Modelcar has already been validated by the init container on the launcher -// pod. This waits for the Modelcar as a sidecar container to be ready. -func waitForModelcar(artifactURI string, localPath string) error { - glog.Infof("Waiting for the Modelcar %s to be available", artifactURI) +// propagateOutputsUpDAG traverses up the DAG hierarchy and creates artifact-task entries and parameter outputs +// for parent DAGs that declare the current task's outputs in their outputDefinitions. +// This enables output collection from child tasks (e.g., loop iterations) to parent DAGs. 
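determineIOType's first-level rules reduce to a small decision table: loop parents yield ITERATOR_OUTPUT, condition-branch parents with non-list outputs yield ONE_OF_OUTPUT, and everything else is a plain OUTPUT. A table-driven restatement with the enums reduced to strings for brevity; the real function operates on the apiV2beta1 types:

```go
package main

import "fmt"

// firstLevelIOType restates the first-level branch of determineIOType.
func firstLevelIOType(parentType string, isList bool) string {
	switch {
	case parentType == "LOOP":
		return "ITERATOR_OUTPUT"
	case parentType == "CONDITION_BRANCH" && !isList:
		return "ONE_OF_OUTPUT"
	default:
		return "OUTPUT"
	}
}

func main() {
	cases := []struct {
		parent string
		isList bool
	}{
		{"LOOP", false},
		{"CONDITION_BRANCH", false},
		{"CONDITION_BRANCH", true},
		{"DAG", false},
	}
	for _, c := range cases {
		fmt.Printf("parent=%s list=%v -> %s\n", c.parent, c.isList, firstLevelIOType(c.parent, c.isList))
	}
}
```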
+func (l *LauncherV2) propagateOutputsUpDAG(ctx context.Context) error { + // If this task has no parent, nothing to propagate + if l.options.ParentTask == nil { + return nil + } - for { - _, err := os.Stat(localPath) - if err == nil { - glog.Infof("The Modelcar is now available at %s", localPath) + // Refresh the Run once to get all tasks with their latest state + // This eliminates the need for individual GetTask calls + fullView := apiV2beta1.GetRunRequest_FULL + refreshedRun, err := l.clientManager.KFPAPIClient().GetRun(ctx, &apiV2beta1.GetRunRequest{RunId: l.options.Run.GetRunId(), View: &fullView}) + if err != nil { + return fmt.Errorf("failed to refresh run before propagation: %w", err) + } - return nil + // Build a map of TaskID -> TaskDetail for fast lookups + taskMap := make(map[string]*apiV2beta1.PipelineTaskDetail) + for _, task := range refreshedRun.GetTasks() { + taskMap[task.GetTaskId()] = task + } + + // Get the refreshed current task from the map + currentTask, exists := taskMap[l.options.Task.GetTaskId()] + if !exists { + return fmt.Errorf("current task %s not found in refreshed run", l.options.Task.GetTaskId()) + } + + currentTaskOutputs := currentTask.GetOutputs() + if currentTaskOutputs == nil { + // No outputs to propagate + return nil + } + + hasArtifacts := len(currentTaskOutputs.GetArtifacts()) > 0 + hasParameters := len(currentTaskOutputs.GetParameters()) > 0 + if !hasArtifacts && !hasParameters { + // No outputs to propagate + return nil + } + + // Start traversing up from the immediate parent + parentTask := l.options.ParentTask + currentScopePath := l.options.ScopePath + isFirstLevel := true // Track if this is first-level propagation (from producing task to immediate parent) + + // Track propagated outputs (artifacts and parameters) for next level + type propagatedInfo struct { + key string + ioType apiV2beta1.IOType + producer *apiV2beta1.IOProducer + } + + for parentTask != nil { + // Get the parent's component spec to check outputDefinitions + parentScopePath, err := util.ScopePathFromStringPath(l.pipelineSpec, parentTask.GetScopePath()) + if err != nil { + return fmt.Errorf("failed to get scope path for parent task %s: %w", parentTask.GetTaskId(), err) } - if !os.IsNotExist(err) { - return fmt.Errorf( - "failed to see if the artifact %s was ready at %s; ensure the main container and Modelcar "+ - "container have the same UID (can be set with the PIPELINE_RUN_AS_USER environment variable on "+ - "the API server): %v", - artifactURI, localPath, err) + parentComponentSpec := parentScopePath.GetLast().GetComponentSpec() + if parentComponentSpec == nil { + return fmt.Errorf("parent task %s has no component spec", parentTask.GetTaskId()) } - time.Sleep(500 * time.Millisecond) - } -} + parentOutputDefs := parentComponentSpec.GetOutputDefinitions() + if parentOutputDefs == nil { + // Parent has no output definitions, stop propagating + break + } -func downloadArtifacts(ctx context.Context, executorInput *pipelinespec.ExecutorInput, defaultBucket *blob.Bucket, defaultBucketConfig *objectstore.Config, namespace string, k8sClient kubernetes.Interface) error { - // Read input artifact metadata. 
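The propagation loop fetches the run once, builds an id-to-task map, and walks parent pointers to the root instead of issuing one GetTask call per level. A minimal sketch of that walk over simplified task records:

```go
package main

import "fmt"

// task is a reduced stand-in for PipelineTaskDetail: an ID plus an optional
// parent pointer, which is all the upward walk needs.
type task struct {
	id     string
	parent string // empty at the root
}

// walkUp visits each ancestor of start using a prebuilt id->task map, the
// same single-GetRun / map-lookup approach the propagation loop uses.
func walkUp(tasks map[string]task, start string, visit func(task) bool) error {
	current, ok := tasks[start]
	if !ok {
		return fmt.Errorf("task %s not found", start)
	}
	for current.parent != "" {
		parent, ok := tasks[current.parent]
		if !ok {
			return fmt.Errorf("parent task %s not found", current.parent)
		}
		if !visit(parent) { // visitor returns false to stop early
			return nil
		}
		current = parent
	}
	return nil
}

func main() {
	tasks := map[string]task{
		"root": {id: "root"},
		"loop": {id: "loop", parent: "root"},
		"iter": {id: "iter", parent: "loop"},
	}
	_ = walkUp(tasks, "iter", func(t task) bool {
		fmt.Println("propagating into", t.id)
		return true
	})
}
```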
- nonDefaultBuckets, err := fetchNonDefaultBuckets(ctx, executorInput.GetInputs().GetArtifacts(), defaultBucketConfig, namespace, k8sClient) - closeNonDefaultBuckets := func(buckets map[string]*blob.Bucket) { - for name, bucket := range nonDefaultBuckets { - if closeBucketErr := bucket.Close(); closeBucketErr != nil { - glog.Warningf("failed to close bucket %q: %q", name, err.Error()) + hasParentArtifactOutputs := len(parentOutputDefs.GetArtifacts()) > 0 + hasParentParameterOutputs := len(parentOutputDefs.GetParameters()) > 0 + + if !hasParentArtifactOutputs && !hasParentParameterOutputs { + // Parent has no output definitions, stop propagating + break + } + + // Get child task name for matching outputs + childTaskName := currentScopePath.GetLast().GetTaskSpec().GetTaskInfo().GetName() + + newPropagatedArtifacts := make(map[string]propagatedInfo) + newPropagatedParameters := make(map[string]propagatedInfo) + + // Propagate artifacts + for _, artifactIO := range currentTaskOutputs.GetArtifacts() { + for _, artifact := range artifactIO.GetArtifacts() { + // Find the matching output key in parent's output definitions + matchingParentKey := findMatchingParentOutputKeyForChild( + childTaskName, + parentComponentSpec, + artifactIO.GetArtifactKey(), + parentOutputDefs, + ) + + if matchingParentKey == "" { + // This output is not declared in parent's outputDefinitions + continue + } + + // Determine the correct IOType + ioType := determineIOType( + isFirstLevel, + artifactIO.GetType(), + parentTask, + matchingParentKey, + parentOutputDefs, + false, // isParameter = false + ) + + // Create artifact-task entry for the parent + // Producer is the child task from parent's perspective, not the original producing task + producer := &apiV2beta1.IOProducer{ + TaskName: childTaskName, + } + + // Only a Runtime Task in an iteration can have an Output and an Iteration Index + // for its output. + if ioType == apiV2beta1.IOType_ITERATOR_OUTPUT && currentTask.TypeAttributes != nil && currentTask.TypeAttributes.IterationIndex != nil { + producer.Iteration = artifactIO.Producer.Iteration + } + + artifactTask := &apiV2beta1.ArtifactTask{ + ArtifactId: artifact.GetArtifactId(), + TaskId: parentTask.GetTaskId(), + RunId: l.options.Run.GetRunId(), + Key: matchingParentKey, + Type: ioType, + Producer: producer, + } + + // Queue artifact-task creation instead of creating immediately + l.batchUpdater.QueueArtifactTask(artifactTask) + + // Track this artifact for next level propagation with its IOType + newPropagatedArtifacts[artifact.GetArtifactId()] = propagatedInfo{ + key: matchingParentKey, + ioType: ioType, + producer: producer, + } } } - } - defer closeNonDefaultBuckets(nonDefaultBuckets) - if err != nil { - return fmt.Errorf("failed to fetch non default buckets: %w", err) - } - for name, artifactList := range executorInput.GetInputs().GetArtifacts() { - // TODO(neuromage): Support concat-based placholders for arguments. 
-		if len(artifactList.Artifacts) == 0 {
-			continue
+		// Propagate parameters
+		// Use the parent task from the task map if we have parameters to propagate
+		currentParentTask, exists := taskMap[parentTask.GetTaskId()]
+		if !exists {
+			return fmt.Errorf("parent task %s not found in task map", parentTask.GetTaskId())
+		}
+
+		// Initialize outputs if needed
+		if currentParentTask.Outputs == nil {
+			currentParentTask.Outputs = &apiV2beta1.PipelineTaskDetail_InputOutputs{}
 		}
-		for _, artifact := range artifactList.Artifacts {
-			// Iterating through the artifact list allows for collected artifacts to be properly consumed.
-			inputArtifact := artifact
-			localPath, err := LocalPathForURI(inputArtifact.Uri)
-			if err != nil {
-				glog.Warningf("Input Artifact %q does not have a recognized storage URI %q. Skipping downloading to local path.", name, inputArtifact.Uri)
+		for _, paramIO := range currentTaskOutputs.GetParameters() {
+			// Find the matching output key in parent's output definitions
+			matchingParentKey := findMatchingParentOutputKeyForChildParameter(
+				childTaskName,
+				parentComponentSpec,
+				paramIO.GetParameterKey(),
+				parentOutputDefs,
+			)
+
+			if matchingParentKey == "" {
+				// This output is not declared in parent's outputDefinitions
 				continue
 			}
-			// OCI artifacts are accessed via shared storage of a Modelcar
-			if strings.HasPrefix(inputArtifact.Uri, "oci://") {
-				err := waitForModelcar(inputArtifact.Uri, localPath)
-				if err != nil {
-					return err
-				}
+			// Determine the correct IOType
+			ioType := determineIOType(
+				isFirstLevel,
+				paramIO.GetType(),
+				parentTask,
+				matchingParentKey,
+				parentOutputDefs,
+				true, // isParameter = true
+			)
+
+			// Create parameter entry for the parent
+			// Producer is the child task from parent's perspective, not the original producing task
+			paramProducer := &apiV2beta1.IOProducer{
+				TaskName: childTaskName,
+			}

-				continue
+			// Include the iteration index for ITERATOR_OUTPUT outputs
+			if ioType == apiV2beta1.IOType_ITERATOR_OUTPUT && currentTask.TypeAttributes != nil && currentTask.TypeAttributes.IterationIndex != nil {
+				paramProducer.Iteration = paramIO.Producer.Iteration
 			}
-			// Copy artifact to local storage.
-			copyErr := func(err error) error {
-				return fmt.Errorf("failed to download input artifact %q from remote storage URI %q: %w", name, inputArtifact.Uri, err)
+			newParam := &apiV2beta1.PipelineTaskDetail_InputOutputs_IOParameter{
+				ParameterKey: matchingParentKey,
+				Value:        paramIO.GetValue(),
+				Type:         ioType,
+				Producer:     paramProducer,
 			}
-			// TODO: Selectively copy artifacts for which .path was actually specified
-			// on the command line.
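findMatchingParentOutputKeyForChildParameter (defined further below) answers one question: which parent output key, if any, has a selector pointing at this child's (task, key) pair. A reduced sketch with the selector types simplified to a plain struct:

```go
package main

import "fmt"

// paramSelector is a reduced stand-in for the pipeline-spec parameter
// selectors matched by parameterSelectorMatches further below.
type paramSelector struct {
	producerSubtask    string
	outputParameterKey string
}

// matchParentOutputKey returns the parent output whose selector points at the
// child's (task, key) pair, or "" when the child output is not surfaced.
func matchParentOutputKey(selectors map[string]paramSelector, childTask, childKey string) string {
	for parentKey, sel := range selectors {
		if sel.producerSubtask == childTask && sel.outputParameterKey == childKey {
			return parentKey
		}
	}
	return ""
}

func main() {
	selectors := map[string]paramSelector{
		"pipeline-accuracy": {producerSubtask: "train", outputParameterKey: "accuracy"},
	}
	fmt.Println(matchParentOutputKey(selectors, "train", "accuracy")) // pipeline-accuracy
	fmt.Println(matchParentOutputKey(selectors, "train", "loss"))     // "" (not propagated)
}
```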
- bucket := defaultBucket - bucketConfig := defaultBucketConfig - if !strings.HasPrefix(inputArtifact.Uri, defaultBucketConfig.PrefixedBucket()) { - nonDefaultBucketConfig, err := objectstore.ParseBucketConfigForArtifactURI(inputArtifact.Uri) - if err != nil { - return fmt.Errorf("failed to parse bucketConfig for output artifact %q with uri %q: %w", name, inputArtifact.GetUri(), err) + + // Accumulate parameter to parent task (will queue update later) + currentParentTask.Outputs.Parameters = append(currentParentTask.Outputs.Parameters, newParam) + + // Track this parameter for next level propagation with its IOType + // Use parameter key as the identifier since parameters don't have IDs like artifacts + paramIdentifier := fmt.Sprintf("%s:%s", paramIO.GetParameterKey(), paramIO.GetValue().String()) + newPropagatedParameters[paramIdentifier] = propagatedInfo{ + key: matchingParentKey, + ioType: ioType, + producer: paramProducer, + } + } + + // Queue parent task update if we modified it with parameters + if len(newPropagatedParameters) > 0 { + l.batchUpdater.QueueTaskUpdate(currentParentTask) + } + + // Move up to the next parent + if parentTask.ParentTaskId == nil || *parentTask.ParentTaskId == "" { + break + } + + // Get the next parent task from the task map + nextParent, exists := taskMap[*parentTask.ParentTaskId] + if !exists { + return fmt.Errorf("next parent task %s not found in task map", *parentTask.ParentTaskId) + } + + // For the next level, we only want to propagate the outputs we just added to this parent + // Build a new currentTaskOutputs with only the newly propagated outputs + newTaskOutputs := &apiV2beta1.PipelineTaskDetail_InputOutputs{ + Artifacts: []*apiV2beta1.PipelineTaskDetail_InputOutputs_IOArtifact{}, + Parameters: []*apiV2beta1.PipelineTaskDetail_InputOutputs_IOParameter{}, + } + + // Build artifact outputs for next level + for artifactID, info := range newPropagatedArtifacts { + // Find the artifact object + var foundArtifact *apiV2beta1.Artifact + for _, artifactIO := range currentTaskOutputs.GetArtifacts() { + for _, artifact := range artifactIO.GetArtifacts() { + if artifact.GetArtifactId() == artifactID { + foundArtifact = artifact + break + } } - nonDefaultBucket, ok := nonDefaultBuckets[nonDefaultBucketConfig.PrefixedBucket()] - if !ok { - return fmt.Errorf("failed to get bucket when downloading input artifact %s with bucket key %s: %w", name, nonDefaultBucketConfig.PrefixedBucket(), err) + if foundArtifact != nil { + break } - bucket = nonDefaultBucket - bucketConfig = nonDefaultBucketConfig } - blobKey, err := bucketConfig.KeyFromURI(inputArtifact.Uri) - if err != nil { - return copyErr(err) + + if foundArtifact != nil { + IOArtifact := &apiV2beta1.PipelineTaskDetail_InputOutputs_IOArtifact{ + ArtifactKey: info.key, + Artifacts: []*apiV2beta1.Artifact{foundArtifact}, + Type: info.ioType, + Producer: info.producer, + } + newTaskOutputs.Artifacts = append(newTaskOutputs.Artifacts, IOArtifact) + } + } + + // Build parameter outputs for next level + for paramIdentifier, info := range newPropagatedParameters { + // Find the parameter object by matching key and value + var foundParam *apiV2beta1.PipelineTaskDetail_InputOutputs_IOParameter + for _, paramIO := range currentTaskOutputs.GetParameters() { + identifier := fmt.Sprintf("%s:%s", paramIO.GetParameterKey(), paramIO.GetValue().String()) + if identifier == paramIdentifier { + foundParam = paramIO + break + } } - if err := objectstore.DownloadBlob(ctx, bucket, localPath, blobKey); err != nil { - return 
copyErr(err) + + if foundParam != nil { + newTaskOutputs.Parameters = append(newTaskOutputs.Parameters, &apiV2beta1.PipelineTaskDetail_InputOutputs_IOParameter{ + ParameterKey: info.key, + Value: foundParam.GetValue(), + Type: info.ioType, + Producer: foundParam.GetProducer(), + }) } } + if len(newTaskOutputs.GetArtifacts()) == 0 && len(newTaskOutputs.GetParameters()) == 0 { + // No more outputs to propagate + break + } + + // Move to the next level + currentTaskOutputs = newTaskOutputs + currentTask = parentTask + parentTask = nextParent + currentScopePath = parentScopePath + isFirstLevel = false // After the first iteration, we're doing multi-level propagation } + return nil } -func fetchNonDefaultBuckets( - ctx context.Context, - artifacts map[string]*pipelinespec.ArtifactList, - defaultBucketConfig *objectstore.Config, - namespace string, - k8sClient kubernetes.Interface, -) (buckets map[string]*blob.Bucket, err error) { - nonDefaultBuckets := make(map[string]*blob.Bucket) - for name, artifactList := range artifacts { - if len(artifactList.Artifacts) == 0 { +// findMatchingParentOutputKeyForChild finds the parent output key that corresponds to the child's output. +// This is a simplified version that takes the child task name directly as a parameter. +func findMatchingParentOutputKeyForChild( + childTaskName string, + parentComponentSpec *pipelinespec.ComponentSpec, + childOutputKey string, + parentOutputDefs *pipelinespec.ComponentOutputsSpec, +) string { + // Get the task spec from the parent's perspective + if parentComponentSpec == nil || parentComponentSpec.GetDag() == nil { + return "" + } + + // Look through parent's DAG tasks to find the child task + for _, dagTask := range parentComponentSpec.GetDag().GetTasks() { + if dagTask.GetTaskInfo().GetName() != childTaskName { continue } - // TODO: Support multiple artifacts someday, probably through the v2 engine. - artifact := artifactList.Artifacts[0] + // Found the child task in parent's DAG + // Check the task's output selectors + if dagTask.GetComponentRef() != nil { + // Look at the parent's output definitions to find which one uses this task's output + for parentOutputKey := range parentOutputDefs.GetArtifacts() { + // Check if this parent output is sourced from the child task + // The parent output may be directly from task output or from an artifact selector + if artifactSelectorMatches(parentComponentSpec, parentOutputKey, childTaskName, childOutputKey) { + return parentOutputKey + } + } + } + } + + return "" +} - // OCI artifacts are accessed via shared storage of a Modelcar - if strings.HasPrefix(artifact.Uri, "oci://") { +// findMatchingParentOutputKeyForChildParameter finds the parent output key that corresponds to the child's parameter output. +func findMatchingParentOutputKeyForChildParameter( + childTaskName string, + parentComponentSpec *pipelinespec.ComponentSpec, + childOutputKey string, + parentOutputDefs *pipelinespec.ComponentOutputsSpec, +) string { + // Get the task spec from the parent's perspective + if parentComponentSpec == nil || parentComponentSpec.GetDag() == nil { + return "" + } + + // Look through parent's DAG tasks to find the child task + for _, dagTask := range parentComponentSpec.GetDag().GetTasks() { + if dagTask.GetTaskInfo().GetName() != childTaskName { continue } - // The artifact does not belong under the object store path for this run. Cases: - // 1. Artifact is cached from a different run, so it may still be in the default bucket, but under a different run id subpath - // 2. 
Artifact is imported from the same bucket, but from a different path (re-use the same session) - // 3. Artifact is imported from a different bucket, or obj store (default to using user env in this case) - if !strings.HasPrefix(artifact.Uri, defaultBucketConfig.PrefixedBucket()) { - nonDefaultBucketConfig, parseErr := objectstore.ParseBucketConfigForArtifactURI(artifact.Uri) - if parseErr != nil { - return nonDefaultBuckets, fmt.Errorf("failed to parse bucketConfig for output artifact %q with uri %q: %w", name, artifact.GetUri(), parseErr) - } - // check if it's same bucket but under a different path, re-use the default bucket session in this case. - if (nonDefaultBucketConfig.Scheme == defaultBucketConfig.Scheme) && (nonDefaultBucketConfig.BucketName == defaultBucketConfig.BucketName) { - nonDefaultBucketConfig.SessionInfo = defaultBucketConfig.SessionInfo + // Found the child task in parent's DAG + // Check the task's output selectors + if dagTask.GetComponentRef() != nil { + // Look at the parent's output definitions to find which one uses this task's parameter output + for parentOutputKey := range parentOutputDefs.GetParameters() { + // Check if this parent output is sourced from the child task + if parameterSelectorMatches(parentComponentSpec, parentOutputKey, childTaskName, childOutputKey) { + return parentOutputKey + } } - nonDefaultBucket, bucketErr := objectstore.OpenBucket(ctx, k8sClient, namespace, nonDefaultBucketConfig) - if bucketErr != nil { - return nonDefaultBuckets, fmt.Errorf("failed to open bucket for output artifact %q with uri %q: %w", name, artifact.GetUri(), bucketErr) + } + } + + return "" +} + +// parameterSelectorMatches checks if a parent output parameter selector matches the child task output +func parameterSelectorMatches( + parentComponentSpec *pipelinespec.ComponentSpec, + parentOutputKey string, + childTaskName string, + childOutputKey string, +) bool { + // Check parameter selectors + dag := parentComponentSpec.GetDag() + if dag == nil || dag.GetOutputs() == nil { + return false + } + + parameterSelectors := dag.GetOutputs().GetParameters() + if parameterSelectors == nil { + return false + } + + selector, exists := parameterSelectors[parentOutputKey] + if !exists { + return false + } + + // Check if the selector references the child task + // Check value_from_parameter (single parameter selector) + if paramSelector := selector.GetValueFromParameter(); paramSelector != nil { + if paramSelector.GetProducerSubtask() == childTaskName && + paramSelector.GetOutputParameterKey() == childOutputKey { + return true + } + } + + // Check value_from_oneof (list of parameter selectors for condition branches) + if oneofSelector := selector.GetValueFromOneof(); oneofSelector != nil { + for _, paramSelector := range oneofSelector.GetParameterSelectors() { + if paramSelector.GetProducerSubtask() == childTaskName && + paramSelector.GetOutputParameterKey() == childOutputKey { + return true } - nonDefaultBuckets[nonDefaultBucketConfig.PrefixedBucket()] = nonDefaultBucket } + } + return false +} + +// artifactSelectorMatches checks if a parent output artifact selector matches the child task output +func artifactSelectorMatches( + parentComponentSpec *pipelinespec.ComponentSpec, + parentOutputKey string, + childTaskName string, + childOutputKey string, +) bool { + // Check artifact selectors + dag := parentComponentSpec.GetDag() + if dag == nil || dag.GetOutputs() == nil { + return false + } + + artifactSelectors := dag.GetOutputs().GetArtifacts() + if artifactSelectors == nil { 
+ return false + } + + selector, exists := artifactSelectors[parentOutputKey] + if !exists { + return false } - return nonDefaultBuckets, nil + // Check if the selector references the child task + for _, artifactSelector := range selector.GetArtifactSelectors() { + if artifactSelector.GetProducerSubtask() == childTaskName && + artifactSelector.GetOutputArtifactKey() == childOutputKey { + return true + } + } + + return false +} + +// waitForModelcar assumes the Modelcar has already been validated by the init container on the launcher +// pod. This waits for the Modelcar as a sidecar container to be ready. +func waitForModelcar(artifactURI string, localPath string) error { + glog.Infof("Waiting for the Modelcar %s to be available", artifactURI) + + for { + _, err := os.Stat(localPath) + if err == nil { + glog.Infof("The Modelcar is now available at %s", localPath) + + return nil + } + + if !os.IsNotExist(err) { + return fmt.Errorf( + "failed to see if the artifact %s was ready at %s; ensure the main container and Modelcar "+ + "container have the same UID (can be set with the PIPELINE_RUN_AS_USER environment variable on "+ + "the API server): %v", + artifactURI, localPath, err) + } + + time.Sleep(500 * time.Millisecond) + } +} + +func (l *LauncherV2) downloadArtifacts(ctx context.Context) error { + for artifactKey, artifactList := range l.executorInput.GetInputs().GetArtifacts() { + for _, artifact := range artifactList.Artifacts { + localPath, err := LocalPathForURI(artifact.Uri) + if err != nil { + glog.Warningf("Input Artifact %q does not have a recognized storage URI %q. Skipping downloading to local path.", artifactKey, artifact.Uri) + continue + } + // OCI artifacts are accessed via shared storage of a Modelcar + if strings.HasPrefix(artifact.Uri, "oci://") { + err := waitForModelcar(artifact.Uri, localPath) + if err != nil { + return err + } + continue + } + + err = l.objectStore.DownloadArtifact(ctx, artifact.Uri, localPath, artifactKey) + if err != nil { + return fmt.Errorf("failed to download input artifact %q from remote storage URI %q: %w", artifactKey, artifact.Uri, err) + } + } + } + return nil } func compileCmdAndArgs(executorInput *pipelinespec.ExecutorInput, cmd string, args []string) (string, []string, error) { placeholders, err := getPlaceholders(executorInput) - + if err != nil { + return "", nil, err + } executorInputJSON, err := protojson.Marshal(executorInput) if err != nil { return "", nil, fmt.Errorf("failed to convert ExecutorInput into JSON: %w", err) @@ -918,12 +1339,12 @@ func getPlaceholders(executorInput *pipelinespec.ExecutorInput) (placeholders ma return placeholders, nil } -func getArtifactSchema(schema *pipelinespec.ArtifactTypeSchema) (string, error) { +func getArtifactSchemaType(schema *pipelinespec.ArtifactTypeSchema) (string, error) { switch t := schema.Kind.(type) { case *pipelinespec.ArtifactTypeSchema_InstanceSchema: return t.InstanceSchema, nil case *pipelinespec.ArtifactTypeSchema_SchemaTitle: - return "title: " + t.SchemaTitle, nil + return t.SchemaTitle, nil case *pipelinespec.ArtifactTypeSchema_SchemaUri: return "", fmt.Errorf("SchemaUri is unsupported") default: @@ -951,14 +1372,14 @@ func mergeRuntimeArtifacts(src, dst *pipelinespec.RuntimeArtifact) { } } -func getExecutorOutputFile(path string) (*pipelinespec.ExecutorOutput, error) { +func (l *LauncherV2) getExecutorOutputFile(path string) (*pipelinespec.ExecutorOutput, error) { // collect user executor output file executorOutput := &pipelinespec.ExecutorOutput{ ParameterValues: 
map[string]*structpb.Value{}, Artifacts: map[string]*pipelinespec.ArtifactList{}, } - _, err := os.Stat(path) + _, err := l.fileSystem.Stat(path) if err != nil { if os.IsNotExist(err) { glog.Infof("output metadata file does not exist in %s", path) @@ -969,7 +1390,7 @@ func getExecutorOutputFile(path string) (*pipelinespec.ExecutorOutput, error) { } } - b, err := os.ReadFile(path) + b, err := l.fileSystem.ReadFile(path) if err != nil { return nil, fmt.Errorf("failed to read output metadata file %q: %w", path, err) } @@ -983,17 +1404,20 @@ func getExecutorOutputFile(path string) (*pipelinespec.ExecutorOutput, error) { } func LocalPathForURI(uri string) (string, error) { + // Used for local debugging + rootPath := os.Getenv("ARTIFACT_LOCAL_PATH") + if strings.HasPrefix(uri, "gs://") { - return "/gcs/" + strings.TrimPrefix(uri, "gs://"), nil + return fmt.Sprintf("%s/gcs/", rootPath) + strings.TrimPrefix(uri, "gs://"), nil } if strings.HasPrefix(uri, "minio://") { - return "/minio/" + strings.TrimPrefix(uri, "minio://"), nil + return fmt.Sprintf("%s/minio/", rootPath) + strings.TrimPrefix(uri, "minio://"), nil } if strings.HasPrefix(uri, "s3://") { - return "/s3/" + strings.TrimPrefix(uri, "s3://"), nil + return fmt.Sprintf("%s/s3/", rootPath) + strings.TrimPrefix(uri, "s3://"), nil } if strings.HasPrefix(uri, "oci://") { - return "/oci/" + strings.ReplaceAll(strings.TrimPrefix(uri, "oci://"), "/", "_") + "/models", nil + return fmt.Sprintf("%s/oci/", rootPath) + strings.ReplaceAll(strings.TrimPrefix(uri, "oci://"), "/", "_") + "/models", nil } return "", fmt.Errorf("failed to generate local path for URI %s: unsupported storage scheme", uri) } @@ -1008,10 +1432,10 @@ func retrieveArtifactPath(artifact *pipelinespec.RuntimeArtifact) (string, error } } -func prepareOutputFolders(executorInput *pipelinespec.ExecutorInput) error { +func (l *LauncherV2) prepareOutputFolders(executorInput *pipelinespec.ExecutorInput) error { for name, parameter := range executorInput.GetOutputs().GetParameters() { dir := filepath.Dir(parameter.OutputFile) - if err := os.MkdirAll(dir, 0755); err != nil { + if err := l.fileSystem.MkdirAll(dir, 0755); err != nil { return fmt.Errorf("failed to create directory %q for output parameter %q: %w", dir, name, err) } } @@ -1028,7 +1452,7 @@ func prepareOutputFolders(executorInput *pipelinespec.ExecutorInput) error { return fmt.Errorf("failed to generate local storage path for output artifact %q: %w", name, err) } - if err := os.MkdirAll(filepath.Dir(localPath), 0755); err != nil { + if err := l.fileSystem.MkdirAll(filepath.Dir(localPath), 0755); err != nil { return fmt.Errorf("unable to create directory %q for output artifact %q: %w", filepath.Dir(localPath), name, err) } } @@ -1036,27 +1460,3 @@ func prepareOutputFolders(executorInput *pipelinespec.ExecutorInput) error { return nil } - -// Adds default parameter values if there is no user provided value -func addDefaultParams( - executorInput *pipelinespec.ExecutorInput, - component *pipelinespec.ComponentSpec, -) (*pipelinespec.ExecutorInput, error) { - // Make a deep copy so we don't alter the original data - executorInputWithDefaultMsg := proto.Clone(executorInput) - executorInputWithDefault, ok := executorInputWithDefaultMsg.(*pipelinespec.ExecutorInput) - if !ok { - return nil, fmt.Errorf("bug: cloned executor input message does not have expected type") - } - - if executorInputWithDefault.GetInputs().GetParameterValues() == nil { - executorInputWithDefault.Inputs.ParameterValues = make(map[string]*structpb.Value) - } - 
for name, value := range component.GetInputDefinitions().GetParameters() { - _, hasInput := executorInputWithDefault.GetInputs().GetParameterValues()[name] - if value.GetDefaultValue() != nil && !hasInput { - executorInputWithDefault.GetInputs().GetParameterValues()[name] = value.GetDefaultValue() - } - } - return executorInputWithDefault, nil -} diff --git a/backend/src/v2/component/launcher_v2_test.go b/backend/src/v2/component/launcher_v2_test.go index 0b44fbb69b2..4ccad5273b1 100644 --- a/backend/src/v2/component/launcher_v2_test.go +++ b/backend/src/v2/component/launcher_v2_test.go @@ -14,24 +14,23 @@ package component import ( + "bytes" "context" - "crypto/tls" "encoding/json" "errors" "io" "os" "testing" - "github.com/kubeflow/pipelines/backend/src/v2/cacheutils" + "github.com/kubeflow/pipelines/backend/src/common/util" + "github.com/kubeflow/pipelines/backend/src/v2/apiclient/kfpapi" "github.com/kubeflow/pipelines/backend/src/v2/client_manager" + "github.com/stretchr/testify/require" "google.golang.org/protobuf/encoding/protojson" "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" - "github.com/kubeflow/pipelines/backend/src/v2/metadata" - "github.com/kubeflow/pipelines/backend/src/v2/objectstore" + apiv2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" "github.com/stretchr/testify/assert" - "gocloud.dev/blob" - _ "gocloud.dev/blob/memblob" "google.golang.org/protobuf/types/known/structpb" "k8s.io/client-go/kubernetes/fake" ) @@ -51,70 +50,299 @@ var addNumbersComponent = &pipelinespec.ComponentSpec{ }, } -// Tests that launcher correctly executes the user component and successfully writes output parameters to file. -func Test_executeV2_Parameters(t *testing.T) { - tests := []struct { - name string - executorInput *pipelinespec.ExecutorInput - executorArgs []string - wantErr bool - }{ - { - "happy pass", - &pipelinespec.ExecutorInput{ - Inputs: &pipelinespec.ExecutorInput_Inputs{ - ParameterValues: map[string]*structpb.Value{"a": structpb.NewNumberValue(1), "b": structpb.NewNumberValue(2)}, +// Example_launcherV2WithMocks demonstrates how to test LauncherV2.Execute with all dependencies mocked. +// This example shows the complete pattern for component-level testing. 
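// At a glance, the pattern is: construct LauncherV2 against a fake client
// manager, then swap in mock dependencies through the fluent With* setters
// before driving execution. A minimal sketch (the full test below is the
// authoritative version; error handling elided):
//
//	launcher, _ := NewLauncherV2(string(executorInputJSON), cmdArgs, opts, clientManager)
//	launcher.WithFileSystem(NewMockFileSystem()).
//		WithCommandExecutor(NewMockCommandExecutor()).
//		WithObjectStore(NewMockObjectStoreClient())
//	out, _ := launcher.execute(ctx, "python", []string{"train.py"})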
+func TestExample_launcherV2WithMocks(t *testing.T) { + // Step 1: Create mock KFP API + mockAPI := kfpapi.NewMockAPI() + + // Step 2: Create test run and task + runID := "test-run-123" + taskID := "test-task-456" + + run := &apiv2beta1.Run{ + RunId: runID, + DisplayName: "test-run", + State: apiv2beta1.RuntimeState_RUNNING, + PipelineSource: &apiv2beta1.Run_PipelineSpec{ + PipelineSpec: &structpb.Struct{}, + }, + Tasks: []*apiv2beta1.PipelineTaskDetail{}, + } + mockAPI.AddRun(run) + + task := &apiv2beta1.PipelineTaskDetail{ + TaskId: taskID, + RunId: runID, + Name: "test-task", + State: apiv2beta1.PipelineTaskDetail_RUNNING, + Type: apiv2beta1.PipelineTaskDetail_RUNTIME, + Inputs: &apiv2beta1.PipelineTaskDetail_InputOutputs{}, + Outputs: &apiv2beta1.PipelineTaskDetail_InputOutputs{}, + } + + // Step 3: Create executor input with inputs and outputs + executorInput := &pipelinespec.ExecutorInput{ + Inputs: &pipelinespec.ExecutorInput_Inputs{ + ParameterValues: map[string]*structpb.Value{ + "input_param": structpb.NewStringValue("test_value"), + }, + Artifacts: map[string]*pipelinespec.ArtifactList{ + "input_data": { + Artifacts: []*pipelinespec.RuntimeArtifact{ + { + Name: "dataset", + Uri: "s3://bucket/input/data.csv", + Type: &pipelinespec.ArtifactTypeSchema{ + Kind: &pipelinespec.ArtifactTypeSchema_SchemaTitle{ + SchemaTitle: "system.Dataset", + }, + }, + }, + }, }, }, - []string{"-c", "test {{$.inputs.parameters['a']}} -eq 1 || exit 1\ntest {{$.inputs.parameters['b']}} -eq 2 || exit 1"}, - false, }, - { - "use default value", - &pipelinespec.ExecutorInput{ - Inputs: &pipelinespec.ExecutorInput_Inputs{ - ParameterValues: map[string]*structpb.Value{"b": structpb.NewNumberValue(2)}, + Outputs: &pipelinespec.ExecutorInput_Outputs{ + Parameters: map[string]*pipelinespec.ExecutorInput_OutputParameter{ + "output_metric": { + OutputFile: "/tmp/outputs/output_metric", }, }, - []string{"-c", "test {{$.inputs.parameters['a']}} -eq 5 || exit 1\ntest {{$.inputs.parameters['b']}} -eq 2 || exit 1"}, - false, + Artifacts: map[string]*pipelinespec.ArtifactList{ + "model": { + Artifacts: []*pipelinespec.RuntimeArtifact{ + { + Name: "trained-model", + Uri: "s3://bucket/output/model.pkl", + Type: &pipelinespec.ArtifactTypeSchema{ + Kind: &pipelinespec.ArtifactTypeSchema_SchemaTitle{ + SchemaTitle: "system.Model", + }, + }, + }, + }, + }, + }, + OutputFile: "/tmp/kfp_outputs/output_metadata.json", }, } - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - fakeKubernetesClientset := &fake.Clientset{} - fakeMetadataClient := metadata.NewFakeClient() - bucket, err := blob.OpenBucket(context.Background(), "mem://test-bucket") - assert.Nil(t, err) - bucketConfig, err := objectstore.ParseBucketConfig("mem://test-bucket/pipeline-root/", nil) - assert.Nil(t, err) - _, _, err = executeV2( - context.Background(), - test.executorInput, - addNumbersComponent, - "sh", - test.executorArgs, - bucket, - bucketConfig, - fakeMetadataClient, - "namespace", - fakeKubernetesClientset, - "false", - "", - ) + executorInputJSON, _ := protojson.Marshal(executorInput) - if test.wantErr { - assert.NotNil(t, err) - } else { - assert.Nil(t, err) + // Step 4: Create component spec + componentSpec := &pipelinespec.ComponentSpec{ + InputDefinitions: &pipelinespec.ComponentInputsSpec{ + Parameters: map[string]*pipelinespec.ComponentInputsSpec_ParameterSpec{ + "input_param": { + ParameterType: pipelinespec.ParameterType_STRING, + }, + }, + }, + OutputDefinitions: &pipelinespec.ComponentOutputsSpec{ + Parameters: 
map[string]*pipelinespec.ComponentOutputsSpec_ParameterSpec{ + "output_metric": { + ParameterType: pipelinespec.ParameterType_NUMBER_DOUBLE, + }, + }, + }, + } - } - }) + // Step 5: Create task spec + taskSpec := &pipelinespec.PipelineTaskSpec{ + TaskInfo: &pipelinespec.PipelineTaskInfo{ + Name: "train-model", + }, + } + + // Step 6: Create launcher options + opts := &LauncherV2Options{ + Namespace: "default", + PodName: "train-model-pod", + PodUID: "pod-uid-123", + PipelineName: "training-pipeline", + PublishLogs: "false", + ComponentSpec: componentSpec, + TaskSpec: taskSpec, + ScopePath: util.ScopePath{}, + Run: run, + Task: task, + PipelineSpec: &structpb.Struct{}, + } + + // Step 7: Create launcher with client manager + clientManager := client_manager.NewFakeClientManager(fake.NewClientset(), mockAPI) + launcher, err := NewLauncherV2( + string(executorInputJSON), + []string{"python", "train.py", "--data", "{{$.inputs.artifacts['input_data'].path}}"}, + opts, + clientManager, + ) + require.NoError(t, err) + + // Step 8: Setup mocks for dependencies + mockFS := NewMockFileSystem() + mockCmd := NewMockCommandExecutor() + mockObjStore := NewMockObjectStoreClient() + + // Configure file system with output data + mockFS.SetFileContent("/tmp/outputs/output_metric", []byte("0.95")) + mockFS.SetFileContent("/tmp/kfp_outputs/output_metadata.json", []byte("{}")) + + // Configure object store with input data + mockObjStore.SetArtifact("s3://bucket/input/data.csv", []byte("col1,col2\n1,2\n")) + + // Configure command executor to succeed + mockCmd.RunError = nil + + // Step 9: Inject mocks into launcher + launcher.WithFileSystem(mockFS). + WithCommandExecutor(mockCmd). + WithObjectStore(mockObjStore) + + // Step 10: Execute the launcher's internal execute method + ctx := context.Background() + executorOutput, err := launcher.execute(ctx, "python", []string{"train.py"}) + require.NotNil(t, executorOutput) + if err != nil { + panic(err) + } + + // Output: Test passed - launcher executed successfully with mocked dependencies + println("Test passed - launcher executed successfully with mocked dependencies") +} + +// TestLauncherV2_ArtifactHandling demonstrates testing artifact download and upload +func TestLauncherV2_ArtifactHandling(t *testing.T) { + // Setup + ctx := context.Background() + mockObjStore := NewMockObjectStoreClient() + + // Simulate pre-existing input artifact + mockObjStore.SetArtifact("s3://bucket/input/dataset.csv", []byte("training,data")) + + // Test download + err := mockObjStore.DownloadArtifact(ctx, "s3://bucket/input/dataset.csv", "/local/dataset.csv", "input_data") + require.NoError(t, err) + + // Verify download was called with correct parameters + assert.Len(t, mockObjStore.DownloadCalls, 1) + assert.Equal(t, "input_data", mockObjStore.DownloadCalls[0].ArtifactKey) + assert.Equal(t, "s3://bucket/input/dataset.csv", mockObjStore.DownloadCalls[0].RemoteURI) + assert.Equal(t, "/local/dataset.csv", mockObjStore.DownloadCalls[0].LocalPath) + + // Test upload + err = mockObjStore.UploadArtifact(ctx, "/local/model.pkl", "s3://bucket/output/model.pkl", "model_output") + require.NoError(t, err) + + // Verify upload was called + assert.Len(t, mockObjStore.UploadCalls, 1) + assert.Equal(t, "model_output", mockObjStore.UploadCalls[0].ArtifactKey) + + // Verify artifact can be queried + modelUploads := mockObjStore.GetUploadCallsForKey("model_output") + assert.Len(t, modelUploads, 1) + assert.Equal(t, "s3://bucket/output/model.pkl", modelUploads[0].RemoteURI) +} + +// 
TestLauncherV2_CommandExecution demonstrates testing command execution +func TestLauncherV2_CommandExecution(t *testing.T) { + mockCmd := NewMockCommandExecutor() + + // Setup custom behavior to write to stdout + mockCmd.RunFunc = func(ctx context.Context, cmd string, args []string, stdin io.Reader, stdout, stderr io.Writer) error { + // Simulate successful execution + stdout.Write([]byte("Training completed successfully\n")) + stdout.Write([]byte("Accuracy: 0.95\n")) + return nil + } + + // Execute command + ctx := context.Background() + var stdout, stderr bytes.Buffer + err := mockCmd.Run(ctx, "python", []string{"train.py"}, nil, &stdout, &stderr) + + // Verify + require.NoError(t, err) + assert.Contains(t, stdout.String(), "Training completed successfully") + assert.Contains(t, stdout.String(), "Accuracy: 0.95") + + // Verify command was called correctly + assert.Equal(t, 1, mockCmd.CallCount()) + assert.Equal(t, "python", mockCmd.RunCalls[0].Cmd) + assert.Equal(t, []string{"train.py"}, mockCmd.RunCalls[0].Args) +} + +// TestLauncherV2_FileSystemOperations demonstrates testing file system operations +func TestLauncherV2_FileSystemOperations(t *testing.T) { + mockFS := NewMockFileSystem() + + // Test directory creation + err := mockFS.MkdirAll("/tmp/outputs", 0755) + require.NoError(t, err) + + // Test file writing + err = mockFS.WriteFile("/tmp/outputs/metrics.json", []byte(`{"accuracy": 0.95}`), 0644) + require.NoError(t, err) + + // Test file reading + content, err := mockFS.ReadFile("/tmp/outputs/metrics.json") + require.NoError(t, err) + assert.Equal(t, `{"accuracy": 0.95}`, string(content)) + + // Verify all operations were tracked + assert.Len(t, mockFS.MkdirAllCalls, 1) + assert.Equal(t, "/tmp/outputs", mockFS.MkdirAllCalls[0].Path) + + assert.Len(t, mockFS.WriteFileCalls, 1) + assert.Equal(t, "/tmp/outputs/metrics.json", mockFS.WriteFileCalls[0].Name) + + assert.Len(t, mockFS.ReadFileCalls, 1) + assert.Equal(t, "/tmp/outputs/metrics.json", mockFS.ReadFileCalls[0]) +} + +// TestLauncherV2_TaskStatusUpdates demonstrates testing KFP API task updates +func TestLauncherV2_TaskStatusUpdates(t *testing.T) { + // Create mock API + mockAPI := kfpapi.NewMockAPI() + + // Create test run + run := &apiv2beta1.Run{ + RunId: "run-123", + DisplayName: "test-run", + State: apiv2beta1.RuntimeState_RUNNING, + PipelineSource: &apiv2beta1.Run_PipelineSpec{ + PipelineSpec: &structpb.Struct{}, + }, + } + mockAPI.AddRun(run) + + // Create test task + task := &apiv2beta1.PipelineTaskDetail{ + TaskId: "task-456", + RunId: "run-123", + Name: "test-task", + State: apiv2beta1.PipelineTaskDetail_RUNNING, } + _, err := mockAPI.CreateTask(context.Background(), &apiv2beta1.CreateTaskRequest{Task: task}) + require.NoError(t, err) + + // Update task status + task.State = apiv2beta1.PipelineTaskDetail_SUCCEEDED + _, err = mockAPI.UpdateTask(context.Background(), &apiv2beta1.UpdateTaskRequest{ + TaskId: "task-456", + Task: task, + }) + require.NoError(t, err) + + // Verify task was updated + updatedTask, err := mockAPI.GetTask(context.Background(), &apiv2beta1.GetTaskRequest{TaskId: "task-456"}) + require.NoError(t, err) + assert.Equal(t, apiv2beta1.PipelineTaskDetail_SUCCEEDED, updatedTask.State) } -func Test_executeV2_publishLogs(t *testing.T) { +// Tests that launcher correctly executes the user component and successfully writes output parameters to file. 
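// The round trip being exercised: the (mocked) component writes each output
// parameter to the file named in ExecutorInput_Outputs, and the launcher reads
// it back through its injected FileSystem. A sketch of how a test pre-seeds
// and then inspects that file via the mock (names as defined in mocks.go):
//
//	mockFS.SetFileContent("/tmp/outputs/output_metric", []byte("0.95"))
//	raw, _ := mockFS.ReadFile("/tmp/outputs/output_metric")
//	// string(raw) == "0.95"; Test_executeV2 below asserts the same value
//	// surfaces as executorOutput.ParameterValues["output_metric"].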
+func Test_execute_Parameters(t *testing.T) { tests := []struct { name string executorInput *pipelinespec.ExecutorInput @@ -145,32 +373,79 @@ func Test_executeV2_publishLogs(t *testing.T) { for _, test := range tests { t.Run(test.name, func(t *testing.T) { - fakeKubernetesClientset := &fake.Clientset{} - fakeMetadataClient := metadata.NewFakeClient() - bucket, err := blob.OpenBucket(context.Background(), "mem://test-bucket") - assert.Nil(t, err) - bucketConfig, err := objectstore.ParseBucketConfig("mem://test-bucket/pipeline-root/", nil) + // Setup executor input with outputs section + test.executorInput.Outputs = &pipelinespec.ExecutorInput_Outputs{ + OutputFile: "/tmp/kfp_outputs/output_metadata.json", + } + + // Marshal executor input + executorInputJSON, err := protojson.Marshal(test.executorInput) assert.Nil(t, err) - _, _, err = executeV2( - context.Background(), - test.executorInput, - addNumbersComponent, - "sh", - test.executorArgs, - bucket, - bucketConfig, - fakeMetadataClient, - "namespace", - fakeKubernetesClientset, - "false", - "", + + // Create mock dependencies + mockAPI := kfpapi.NewMockAPI() + clientManager := client_manager.NewFakeClientManager(fake.NewClientset(), mockAPI) + + // Create test run and task + run := &apiv2beta1.Run{ + RunId: "test-run", + DisplayName: "test-run", + State: apiv2beta1.RuntimeState_RUNNING, + PipelineSource: &apiv2beta1.Run_PipelineSpec{ + PipelineSpec: &structpb.Struct{}, + }, + } + mockAPI.AddRun(run) + + task := &apiv2beta1.PipelineTaskDetail{ + TaskId: "test-task", + RunId: "test-run", + Name: "test-task", + State: apiv2beta1.PipelineTaskDetail_RUNNING, + Inputs: &apiv2beta1.PipelineTaskDetail_InputOutputs{}, + Outputs: &apiv2beta1.PipelineTaskDetail_InputOutputs{}, + } + + // Create launcher options + opts := &LauncherV2Options{ + Namespace: "namespace", + PodName: "test-pod", + PodUID: "test-uid", + PipelineName: "test-pipeline", + ComponentSpec: addNumbersComponent, + Run: run, + Task: task, + PipelineSpec: &structpb.Struct{}, + } + + // Create launcher + launcher, err := NewLauncherV2( + string(executorInputJSON), + append([]string{"sh"}, test.executorArgs...), + opts, + clientManager, ) + assert.Nil(t, err) + + // Setup mocks + mockFS := NewMockFileSystem() + mockCmd := NewMockCommandExecutor() + mockObjStore := NewMockObjectStoreClient() + + mockFS.SetFileContent("/tmp/kfp_outputs/output_metadata.json", []byte("{}")) + mockCmd.RunError = nil + + launcher.WithFileSystem(mockFS). + WithCommandExecutor(mockCmd). 
+ WithObjectStore(mockObjStore) + + // Execute + _, err = launcher.execute(context.Background(), "sh", test.executorArgs) if test.wantErr { assert.NotNil(t, err) } else { assert.Nil(t, err) - } }) } @@ -218,8 +493,7 @@ func Test_executorInput_compileCmdAndArgs(t *testing.T) { "--executor_input", "{{$}}", "--function_to_execute", "sayHello", } - cmd, args, err = compileCmdAndArgs(executorInput, cmd, args) - + _, args, err = compileCmdAndArgs(executorInput, cmd, args) assert.NoError(t, err) var actualExecutorInput string @@ -245,6 +519,181 @@ func Test_executorInput_compileCmdAndArgs(t *testing.T) { assert.Equal(t, "9312", config["sphinx_port"]) } +// Tests executeV2 flow including parameter collection, artifact uploads, and task updates +func Test_executeV2(t *testing.T) { + // Create component spec with input/output parameters and artifacts + componentSpec := &pipelinespec.ComponentSpec{ + InputDefinitions: &pipelinespec.ComponentInputsSpec{ + Parameters: map[string]*pipelinespec.ComponentInputsSpec_ParameterSpec{ + "input_param": { + ParameterType: pipelinespec.ParameterType_STRING, + }, + "optional_param": { + ParameterType: pipelinespec.ParameterType_NUMBER_INTEGER, + DefaultValue: structpb.NewNumberValue(42), + }, + }, + }, + OutputDefinitions: &pipelinespec.ComponentOutputsSpec{ + Parameters: map[string]*pipelinespec.ComponentOutputsSpec_ParameterSpec{ + "output_metric": { + ParameterType: pipelinespec.ParameterType_NUMBER_DOUBLE, + }, + "output_message": { + ParameterType: pipelinespec.ParameterType_STRING, + }, + }, + Artifacts: map[string]*pipelinespec.ComponentOutputsSpec_ArtifactSpec{ + "model": { + ArtifactType: &pipelinespec.ArtifactTypeSchema{ + Kind: &pipelinespec.ArtifactTypeSchema_SchemaTitle{ + SchemaTitle: "system.Model", + }, + }, + }, + }, + }, + } + + // Create executor input with parameters (intentionally omitting optional_param to test defaults) + executorInput := &pipelinespec.ExecutorInput{ + Inputs: &pipelinespec.ExecutorInput_Inputs{ + ParameterValues: map[string]*structpb.Value{ + "input_param": structpb.NewStringValue("test_value"), + }, + }, + Outputs: &pipelinespec.ExecutorInput_Outputs{ + Parameters: map[string]*pipelinespec.ExecutorInput_OutputParameter{ + "output_metric": { + OutputFile: "/tmp/outputs/output_metric", + }, + "output_message": { + OutputFile: "/tmp/outputs/output_message", + }, + }, + Artifacts: map[string]*pipelinespec.ArtifactList{ + "model": { + Artifacts: []*pipelinespec.RuntimeArtifact{ + { + Name: "trained-model", + Uri: "s3://bucket/output/model.pkl", + Type: &pipelinespec.ArtifactTypeSchema{ + Kind: &pipelinespec.ArtifactTypeSchema_SchemaTitle{ + SchemaTitle: "system.Model", + }, + }, + }, + }, + }, + }, + OutputFile: "/tmp/kfp_outputs/output_metadata.json", + }, + } + + executorInputJSON, err := protojson.Marshal(executorInput) + assert.NoError(t, err) + + // Create mock dependencies + mockAPI := kfpapi.NewMockAPI() + clientManager := client_manager.NewFakeClientManager(fake.NewClientset(), mockAPI) + + // Create test run + run := &apiv2beta1.Run{ + RunId: "test-run-123", + DisplayName: "test-run", + State: apiv2beta1.RuntimeState_RUNNING, + PipelineSource: &apiv2beta1.Run_PipelineSpec{ + PipelineSpec: &structpb.Struct{}, + }, + Tasks: []*apiv2beta1.PipelineTaskDetail{}, + } + mockAPI.AddRun(run) + + // Create test task + task := &apiv2beta1.PipelineTaskDetail{ + TaskId: "test-task-456", + RunId: "test-run-123", + Name: "train-model", + State: apiv2beta1.PipelineTaskDetail_RUNNING, + Type: apiv2beta1.PipelineTaskDetail_RUNTIME, + 
Inputs: &apiv2beta1.PipelineTaskDetail_InputOutputs{}, + Outputs: &apiv2beta1.PipelineTaskDetail_InputOutputs{}, + } + + // Add task to mock API so it can be updated during execution + _, err = mockAPI.CreateTask(context.Background(), &apiv2beta1.CreateTaskRequest{Task: task}) + assert.NoError(t, err) + + // Create task spec + taskSpec := &pipelinespec.PipelineTaskSpec{ + TaskInfo: &pipelinespec.PipelineTaskInfo{ + Name: "train-model", + }, + } + + // Create launcher options + opts := &LauncherV2Options{ + Namespace: "default", + PodName: "train-model-pod", + PodUID: "pod-uid-123", + PipelineName: "training-pipeline", + ComponentSpec: componentSpec, + TaskSpec: taskSpec, + Run: run, + Task: task, + PipelineSpec: &structpb.Struct{}, + } + + // Create launcher + launcher, err := NewLauncherV2( + string(executorInputJSON), + []string{"python", "train.py"}, + opts, + clientManager, + ) + assert.NoError(t, err) + + // Setup mocks + mockFS := NewMockFileSystem() + mockCmd := NewMockCommandExecutor() + mockObjStore := NewMockObjectStoreClient() + + // Configure file system with output parameter values + mockFS.SetFileContent("/tmp/outputs/output_metric", []byte("0.95")) + mockFS.SetFileContent("/tmp/outputs/output_message", []byte("Training completed successfully")) + mockFS.SetFileContent("/tmp/kfp_outputs/output_metadata.json", []byte("{}")) + + // Configure command executor to succeed + mockCmd.RunError = nil + + // Inject mocks + launcher.WithFileSystem(mockFS). + WithCommandExecutor(mockCmd). + WithObjectStore(mockObjStore) + + // Execute executeV2 via ExecuteForTesting + ctx := context.Background() + executorOutput, err := launcher.ExecuteForTesting(ctx) + + // Verify execution succeeded + assert.NoError(t, err) + assert.NotNil(t, executorOutput) + + // Verify output parameters were collected + assert.Contains(t, executorOutput.ParameterValues, "output_metric") + assert.Contains(t, executorOutput.ParameterValues, "output_message") + assert.Equal(t, 0.95, executorOutput.ParameterValues["output_metric"].GetNumberValue()) + assert.Equal(t, "Training completed successfully", executorOutput.ParameterValues["output_message"].GetStringValue()) + + // Verify artifact was uploaded to object store + assert.True(t, mockObjStore.WasUploaded("s3://bucket/output/model.pkl"), "Expected model artifact to be uploaded") + + // Verify batch updater queued artifact creation and task updates + metrics := launcher.batchUpdater.GetMetrics() + assert.Greater(t, metrics["queued_artifacts"], 0, "Expected artifacts to be queued for creation") + assert.Greater(t, metrics["queued_task_updates"], 0, "Expected task updates to be queued") +} + func Test_get_log_Writer(t *testing.T) { old := osCreateFunc defer func() { osCreateFunc = old }() @@ -320,25 +769,22 @@ func Test_get_log_Writer(t *testing.T) { func Test_NewLauncherV2(t *testing.T) { var testCmdArgs = []string{"sh", "-c", "echo \"hello world\""} - disabledCacheClient, _ := cacheutils.NewClient(true, &tls.Config{}) + mockAPI := kfpapi.NewMockAPI() var testLauncherV2Deps = client_manager.NewFakeClientManager( fake.NewSimpleClientset(), - metadata.NewFakeClient(), - disabledCacheClient, + mockAPI, ) var testValidLauncherV2Opts = LauncherV2Options{ - Namespace: "my-namespace", - PodName: "my-pod", - PodUID: "abcd", - MLMDServerAddress: "example.com", - MLMDServerPort: "1234", + Namespace: "my-namespace", + PodName: "my-pod", + PodUID: "abcd", + PipelineName: "test-pipeline", + PipelineSpec: &structpb.Struct{}, } type args struct { - executionID int64 executorInputJSON 
string - componentSpecJSON string cmdArgs []string opts LauncherV2Options cm client_manager.ClientManagerInterface @@ -351,57 +797,40 @@ func Test_NewLauncherV2(t *testing.T) { { name: "happy path", args: &args{ - executionID: 1, executorInputJSON: "{}", - componentSpecJSON: "{}", cmdArgs: testCmdArgs, opts: testValidLauncherV2Opts, cm: testLauncherV2Deps, }, expectedErr: nil, }, - { - name: "missing executionID", - args: &args{ - executionID: 0, - }, - expectedErr: errors.New("must specify execution ID"), - }, { name: "invalid executorInput", args: &args{ - executionID: 1, executorInputJSON: "{", + cmdArgs: testCmdArgs, + opts: testValidLauncherV2Opts, + cm: testLauncherV2Deps, }, expectedErr: errors.New("unexpected EOF"), }, - { - name: "invalid componentSpec", - args: &args{ - executionID: 1, - executorInputJSON: "{}", - componentSpecJSON: "{", - }, - expectedErr: errors.New("unexpected EOF\ncomponentSpec: {"), - }, { name: "missing cmdArgs", args: &args{ - executionID: 1, executorInputJSON: "{}", - componentSpecJSON: "{}", cmdArgs: []string{}, + opts: testValidLauncherV2Opts, + cm: testLauncherV2Deps, }, expectedErr: errors.New("command and arguments are empty"), }, { name: "invalid opts", args: &args{ - executionID: 1, executorInputJSON: "{}", - componentSpecJSON: "{}", cmdArgs: testCmdArgs, opts: LauncherV2Options{}, + cm: testLauncherV2Deps, }, expectedErr: errors.New("invalid launcher options: must specify Namespace"), }, @@ -409,7 +838,7 @@ func Test_NewLauncherV2(t *testing.T) { for _, test := range tests { t.Run(test.name, func(t *testing.T) { args := test.args - _, err := NewLauncherV2(context.Background(), args.executionID, args.executorInputJSON, args.componentSpecJSON, args.cmdArgs, &args.opts, args.cm) + _, err := NewLauncherV2(args.executorInputJSON, args.cmdArgs, &args.opts, args.cm) if test.expectedErr != nil { assert.ErrorContains(t, err, test.expectedErr.Error()) } else { diff --git a/backend/src/v2/component/mocks.go b/backend/src/v2/component/mocks.go new file mode 100644 index 00000000000..eb8f65a4531 --- /dev/null +++ b/backend/src/v2/component/mocks.go @@ -0,0 +1,325 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
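//
// The mocks below are hand-rolled rather than generated: each one records its
// calls under a mutex (so assertions stay safe if the launcher ever exercises
// them concurrently) and exposes *Error fields for forcing failure paths.
// A failure-injection sketch using only names defined in this file:
//
//	fs := NewMockFileSystem()
//	fs.ReadFileError = os.ErrPermission // every ReadFile now fails
//	_, err := fs.ReadFile("/tmp/outputs/metric")
//	// err == os.ErrPermission; the call is still recorded in ReadFileCalls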
+ +package component + +import ( + "context" + "fmt" + "io" + "io/fs" + "os" + "sync" + "time" +) + +// MockFileSystem is a mock implementation of FileSystem for testing +type MockFileSystem struct { + mu sync.Mutex + + // Track calls + MkdirAllCalls []MkdirAllCall + CreateCalls []string + ReadFileCalls []string + WriteFileCalls []WriteFileCall + StatCalls []string + + // In-memory file system + files map[string][]byte + dirs map[string]bool + + // Control behavior + MkdirAllError error + CreateError error + ReadFileError error + WriteFileError error + StatError error +} + +type MkdirAllCall struct { + Path string + Perm os.FileMode +} + +type WriteFileCall struct { + Name string + Data []byte + Perm os.FileMode +} + +type mockFileInfo struct { + name string + size int64 +} + +func (m *mockFileInfo) Name() string { return m.name } +func (m *mockFileInfo) Size() int64 { return m.size } +func (m *mockFileInfo) Mode() os.FileMode { return 0644 } +func (m *mockFileInfo) ModTime() time.Time { return time.Now() } +func (m *mockFileInfo) IsDir() bool { return false } +func (m *mockFileInfo) Sys() interface{} { return nil } + +func NewMockFileSystem() *MockFileSystem { + return &MockFileSystem{ + files: make(map[string][]byte), + dirs: make(map[string]bool), + } +} + +func (m *MockFileSystem) MkdirAll(path string, perm os.FileMode) error { + m.mu.Lock() + defer m.mu.Unlock() + + m.MkdirAllCalls = append(m.MkdirAllCalls, MkdirAllCall{Path: path, Perm: perm}) + if m.MkdirAllError != nil { + return m.MkdirAllError + } + m.dirs[path] = true + return nil +} + +func (m *MockFileSystem) Create(name string) (*os.File, error) { + m.mu.Lock() + defer m.mu.Unlock() + + m.CreateCalls = append(m.CreateCalls, name) + if m.CreateError != nil { + return nil, m.CreateError + } + // For mock purposes, we don't return a real file + // Tests should use WriteFile instead + m.files[name] = []byte{} + return nil, nil +} + +func (m *MockFileSystem) ReadFile(name string) ([]byte, error) { + m.mu.Lock() + defer m.mu.Unlock() + + m.ReadFileCalls = append(m.ReadFileCalls, name) + if m.ReadFileError != nil { + return nil, m.ReadFileError + } + data, exists := m.files[name] + if !exists { + return nil, os.ErrNotExist + } + return data, nil +} + +func (m *MockFileSystem) WriteFile(name string, data []byte, perm os.FileMode) error { + m.mu.Lock() + defer m.mu.Unlock() + + m.WriteFileCalls = append(m.WriteFileCalls, WriteFileCall{Name: name, Data: data, Perm: perm}) + if m.WriteFileError != nil { + return m.WriteFileError + } + m.files[name] = data + return nil +} + +func (m *MockFileSystem) Stat(name string) (fs.FileInfo, error) { + m.mu.Lock() + defer m.mu.Unlock() + + m.StatCalls = append(m.StatCalls, name) + if m.StatError != nil { + return nil, m.StatError + } + data, exists := m.files[name] + if !exists { + return nil, os.ErrNotExist + } + return &mockFileInfo{name: name, size: int64(len(data))}, nil +} + +// SetFileContent sets file content for testing. +func (m *MockFileSystem) SetFileContent(name string, data []byte) { + m.mu.Lock() + defer m.mu.Unlock() + m.files[name] = data +} + +// GetFileContent gets file content for assertions. 
+func (m *MockFileSystem) GetFileContent(name string) ([]byte, bool) { + m.mu.Lock() + defer m.mu.Unlock() + data, exists := m.files[name] + return data, exists +} + +// MockCommandExecutor is a mock implementation of CommandExecutor for testing +type MockCommandExecutor struct { + mu sync.Mutex + + // Track calls + RunCalls []CommandCall + + // Control behavior + RunError error + // Optional: custom function to execute instead + RunFunc func(ctx context.Context, cmd string, args []string, stdin io.Reader, stdout, stderr io.Writer) error +} + +type CommandCall struct { + Cmd string + Args []string + Stdout string + Stderr string +} + +func NewMockCommandExecutor() *MockCommandExecutor { + return &MockCommandExecutor{} +} + +func (m *MockCommandExecutor) Run(ctx context.Context, cmd string, args []string, stdin io.Reader, stdout, stderr io.Writer) error { + m.mu.Lock() + defer m.mu.Unlock() + + call := CommandCall{ + Cmd: cmd, + Args: args, + } + + // If custom function is provided, use it + if m.RunFunc != nil { + err := m.RunFunc(ctx, cmd, args, stdin, stdout, stderr) + m.RunCalls = append(m.RunCalls, call) + return err + } + + // Otherwise use default error or success + m.RunCalls = append(m.RunCalls, call) + return m.RunError +} + +// CallCount returns the number of times Run was called. +func (m *MockCommandExecutor) CallCount() int { + m.mu.Lock() + defer m.mu.Unlock() + return len(m.RunCalls) +} + +// MockObjectStoreClient is a mock implementation of ObjectStoreClientInterface for testing +type MockObjectStoreClient struct { + mu sync.Mutex + + // Track calls + UploadCalls []ArtifactCall + DownloadCalls []ArtifactCall + + // In-memory artifact storage (remoteURI -> data) + artifacts map[string][]byte + + // Control behavior + UploadError error + DownloadError error +} + +type ArtifactCall struct { + LocalPath string + RemoteURI string + ArtifactKey string +} + +func NewMockObjectStoreClient() *MockObjectStoreClient { + return &MockObjectStoreClient{ + artifacts: make(map[string][]byte), + } +} + +func (m *MockObjectStoreClient) UploadArtifact(ctx context.Context, localPath, remoteURI, artifactKey string) error { + m.mu.Lock() + defer m.mu.Unlock() + + m.UploadCalls = append(m.UploadCalls, ArtifactCall{ + LocalPath: localPath, + RemoteURI: remoteURI, + ArtifactKey: artifactKey, + }) + + if m.UploadError != nil { + return m.UploadError + } + + // Simulate upload by storing in memory + m.artifacts[remoteURI] = []byte(fmt.Sprintf("uploaded from %s", localPath)) + return nil +} + +func (m *MockObjectStoreClient) DownloadArtifact(ctx context.Context, remoteURI, localPath, artifactKey string) error { + m.mu.Lock() + defer m.mu.Unlock() + + m.DownloadCalls = append(m.DownloadCalls, ArtifactCall{ + LocalPath: localPath, + RemoteURI: remoteURI, + ArtifactKey: artifactKey, + }) + + if m.DownloadError != nil { + return m.DownloadError + } + + // Check if artifact exists + if _, exists := m.artifacts[remoteURI]; !exists { + return fmt.Errorf("artifact not found: %s", remoteURI) + } + + return nil +} + +// SetArtifact sets artifact content for testing. 
+func (m *MockObjectStoreClient) SetArtifact(remoteURI string, data []byte) { + m.mu.Lock() + defer m.mu.Unlock() + m.artifacts[remoteURI] = data +} + +// WasUploaded is a helper to check if artifact was uploaded +func (m *MockObjectStoreClient) WasUploaded(remoteURI string) bool { + m.mu.Lock() + defer m.mu.Unlock() + _, exists := m.artifacts[remoteURI] + return exists +} + +// GetUploadCallsForKey is a helper to get upload calls for a specific artifact key +func (m *MockObjectStoreClient) GetUploadCallsForKey(artifactKey string) []ArtifactCall { + m.mu.Lock() + defer m.mu.Unlock() + + var calls []ArtifactCall + for _, call := range m.UploadCalls { + if call.ArtifactKey == artifactKey { + calls = append(calls, call) + } + } + return calls +} + +// GetDownloadCallsForKey is a helper to get download calls for a specific artifact key +func (m *MockObjectStoreClient) GetDownloadCallsForKey(artifactKey string) []ArtifactCall { + m.mu.Lock() + defer m.mu.Unlock() + + var calls []ArtifactCall + for _, call := range m.DownloadCalls { + if call.ArtifactKey == artifactKey { + calls = append(calls, call) + } + } + return calls +} diff --git a/backend/src/v2/component/util.go b/backend/src/v2/component/util.go index 0af3177a8dc..964036bf2ff 100644 --- a/backend/src/v2/component/util.go +++ b/backend/src/v2/component/util.go @@ -1,9 +1,29 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + package component import ( "fmt" "io" "os" + "strconv" + "strings" + + "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" + apiV2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" + "google.golang.org/protobuf/types/known/structpb" ) // CopyThisBinary copies the running binary into destination path. 
@@ -41,3 +61,69 @@ func findThisBinary() (string, error) { } return path, nil } + +func textToPbValue(text string, t pipelinespec.ParameterType_ParameterTypeEnum) (*structpb.Value, error) { + msg := func(err error) error { + return fmt.Errorf("TextToPbValue(text=%q, type=%q) failed: %w", text, t, err) + } + switch t { + case pipelinespec.ParameterType_STRING: + return structpb.NewStringValue(text), nil + case pipelinespec.ParameterType_NUMBER_INTEGER: + i, err := strconv.ParseInt(strings.TrimSpace(text), 10, 0) + if err != nil { + return nil, msg(err) + } + return structpb.NewNumberValue(float64(i)), nil + case pipelinespec.ParameterType_NUMBER_DOUBLE: + f, err := strconv.ParseFloat(strings.TrimSpace(text), 64) + if err != nil { + return nil, msg(err) + } + return structpb.NewNumberValue(f), nil + case pipelinespec.ParameterType_BOOLEAN: + v, err := strconv.ParseBool(strings.TrimSpace(text)) + if err != nil { + return nil, msg(err) + } + return structpb.NewBoolValue(v), nil + case pipelinespec.ParameterType_LIST: + v := &structpb.Value{} + if err := v.UnmarshalJSON([]byte(text)); err != nil { + return nil, msg(err) + } + if _, ok := v.GetKind().(*structpb.Value_ListValue); !ok { + return nil, msg(fmt.Errorf("unexpected type")) + } + return v, nil + case pipelinespec.ParameterType_STRUCT: + v := &structpb.Value{} + if err := v.UnmarshalJSON([]byte(text)); err != nil { + return nil, msg(err) + } + if _, ok := v.GetKind().(*structpb.Value_StructValue); !ok { + return nil, msg(fmt.Errorf("unexpected type")) + } + return v, nil + default: + return nil, msg(fmt.Errorf("unknown type. Expected STRING, NUMBER_INTEGER, NUMBER_DOUBLE, BOOLEAN, LIST or STRUCT")) + } +} + +var artifactTypeSchemaToArtifactTypeMap = map[string]apiV2beta1.Artifact_ArtifactType{ + "system.Artifact": apiV2beta1.Artifact_Artifact, + "system.Dataset": apiV2beta1.Artifact_Dataset, + "system.Model": apiV2beta1.Artifact_Model, + "system.Metrics": apiV2beta1.Artifact_Metric, + "system.ClassificationMetrics": apiV2beta1.Artifact_ClassificationMetric, + "system.SlicedClassificationMetrics": apiV2beta1.Artifact_SlicedClassificationMetric, + "system.HTML": apiV2beta1.Artifact_HTML, + "system.Markdown": apiV2beta1.Artifact_Markdown, +} + +func artifactTypeSchemaToArtifactType(typeSchema string) (apiV2beta1.Artifact_ArtifactType, error) { + if artifactType, ok := artifactTypeSchemaToArtifactTypeMap[typeSchema]; ok { + return artifactType, nil + } + return apiV2beta1.Artifact_TYPE_UNSPECIFIED, fmt.Errorf("unknown artifact type: %s", typeSchema) +} diff --git a/backend/src/v2/config/env.go b/backend/src/v2/config/env.go index 4500da8bacb..c7f1ab71dc2 100644 --- a/backend/src/v2/config/env.go +++ b/backend/src/v2/config/env.go @@ -12,8 +12,6 @@ // See the License for the specific language governing permissions and // limitations under the License. -// Package metadata contains types to record/retrieve metadata stored in MLMD -// for individual pipeline steps. package config import ( @@ -21,8 +19,9 @@ import ( "fmt" "os" "strconv" - "strings" + "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" + "github.com/kubeflow/pipelines/backend/src/common/util" "github.com/kubeflow/pipelines/backend/src/v2/objectstore" "sigs.k8s.io/yaml" @@ -58,51 +57,17 @@ type Config struct { data map[string]string } -// FromConfigMap loads config from a kfp-launcher Kubernetes config map. 
-func FromConfigMap(ctx context.Context, clientSet kubernetes.Interface, namespace string) (*Config, error) { - config, err := clientSet.CoreV1().ConfigMaps(namespace).Get(ctx, configMapName, metav1.GetOptions{}) - if err != nil { - if k8errors.IsNotFound(err) { - glog.Infof("cannot find launcher configmap: name=%q namespace=%q, will use default config", configMapName, namespace) - // LauncherConfig is optional, so ignore not found error. - return nil, nil - } - return nil, err - } - return &Config{data: config.Data}, nil -} - // DefaultPipelineRoot gets the configured default pipeline root. func (c *Config) DefaultPipelineRoot() string { // The key defaultPipelineRoot is optional in launcher config. - if c == nil || c.data[configKeyDefaultPipelineRoot] == "" { + if c == nil || c.data == nil { return defaultPipelineRoot } - return c.data[configKeyDefaultPipelineRoot] -} - -// InPodNamespace gets current namespace from inside a Kubernetes Pod. -func InPodNamespace() (string, error) { - // The path is available in Pods. - // https://kubernetes.io/docs/tasks/run-application/access-api-from-pod/#directly-accessing-the-rest-api - ns, err := os.ReadFile("/var/run/secrets/kubernetes.io/serviceaccount/namespace") - if err != nil { - return "", fmt.Errorf("failed to get namespace in Pod: %w", err) - } - return string(ns), nil -} - -// InPodName gets the pod name from inside a Kubernetes Pod. -func InPodName() (string, error) { - if podName, exists := os.LookupEnv("ARGO_POD_NAME"); exists && podName != "" { - return podName, nil - } - podName, err := os.ReadFile("/etc/hostname") - if err != nil { - return "", fmt.Errorf("failed to get pod name in Pod: %w", err) + // Check if key exists and has non-empty value + if val, exists := c.data[configKeyDefaultPipelineRoot]; !exists || val == "" { + return defaultPipelineRoot } - name := string(podName) - return strings.TrimSuffix(name, "\n"), nil + return c.data[configKeyDefaultPipelineRoot] } func (c *Config) GetStoreSessionInfo(path string) (objectstore.SessionInfo, error) { @@ -164,12 +129,50 @@ func (c *Config) getBucketProviders() (*BucketProviders, error) { return bucketProviders, nil } +// FetchLauncherConfigMap loads config from a kfp-launcher Kubernetes config map. +func FetchLauncherConfigMap(ctx context.Context, clientSet kubernetes.Interface, namespace string) (*Config, error) { + config, err := clientSet.CoreV1().ConfigMaps(namespace).Get(ctx, configMapName, metav1.GetOptions{}) + if err != nil { + if k8errors.IsNotFound(err) { + glog.Infof("cannot find launcher configmap: name=%q namespace=%q, will use default config", configMapName, namespace) + // LauncherConfig is optional, so ignore not found error. + return nil, nil + } + return nil, err + } + return &Config{data: config.Data}, nil +} + +// GetPipelineRootWithPipelineRunContext gets the pipeline root for a run. +// The returned Pipeline Root appends the pipeline name and run id. 
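// Resolution order: an explicit RuntimeConfig.PipelineRoot on the run wins;
// otherwise the kfp-launcher configmap default is used. Example (a sketch,
// assuming a default root of "minio://mlpipeline/v2/artifacts"): pipeline
// "training-pipeline" with run id "run-123" resolves to
// "minio://mlpipeline/v2/artifacts/training-pipeline/run-123".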
+func GetPipelineRootWithPipelineRunContext( + ctx context.Context, + pipelineName, namespace string, + k8sClient kubernetes.Interface, + run *go_client.Run) (string, error) { + var pipelineRoot string + if run.RuntimeConfig != nil && run.RuntimeConfig.PipelineRoot != "" { + pipelineRoot = run.RuntimeConfig.PipelineRoot + glog.Infof("PipelineRoot=%q from runtime config will be used.", pipelineRoot) + } else { + cfg, err := FetchLauncherConfigMap(ctx, k8sClient, namespace) + if err != nil { + return "", fmt.Errorf("failed to fetch launcher configmap: %w", err) + } + pipelineRoot = cfg.DefaultPipelineRoot() + glog.Infof("PipelineRoot=%q from default config", pipelineRoot) + } + + pipelineRootAppended := util.GenerateOutputURI(pipelineRoot, []string{pipelineName, run.RunId}, true) + return pipelineRootAppended, nil +} + func getDefaultMinioSessionInfo() (objectstore.SessionInfo, error) { sess := objectstore.SessionInfo{ Provider: "minio", Params: map[string]string{ "region": "minio", - "endpoint": objectstore.DefaultMinioEndpointInMultiUserMode, + "endpoint": getDefaultMinioHost(), "disableSSL": strconv.FormatBool(true), "fromEnv": strconv.FormatBool(false), "maxRetries": strconv.FormatInt(int64(5), 10), @@ -181,3 +184,19 @@ func getDefaultMinioSessionInfo() (objectstore.SessionInfo, error) { } return sess, nil } + +func getDefaultMinioHost() string { + endpoint := objectstore.DefaultMinioEndpointInMultiUserMode + var host, port string + if os.Getenv("OBJECT_STORE_HOST") != "" { + host = os.Getenv("OBJECT_STORE_HOST") + } + if os.Getenv("OBJECT_STORE_PORT") != "" { + port = os.Getenv("OBJECT_STORE_PORT") + } + if host != "" && port != "" { + return fmt.Sprintf("%s:%s", host, port) + } else { + return endpoint + } +} diff --git a/backend/src/v2/driver/cache.go b/backend/src/v2/driver/cache.go index 0419beeb3d8..e8582fe5fb3 100644 --- a/backend/src/v2/driver/cache.go +++ b/backend/src/v2/driver/cache.go @@ -18,69 +18,20 @@ import ( "context" "fmt" "sort" - "strconv" - "time" - - "google.golang.org/protobuf/types/known/timestamppb" "github.com/golang/glog" "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" - api "github.com/kubeflow/pipelines/backend/api/v1beta1/go_client" + apiv2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" + "github.com/kubeflow/pipelines/backend/src/v2/apiclient/kfpapi" "github.com/kubeflow/pipelines/backend/src/v2/cacheutils" - "github.com/kubeflow/pipelines/backend/src/v2/metadata" + "github.com/kubeflow/pipelines/backend/src/v2/driver/common" + "google.golang.org/protobuf/encoding/protojson" ) -func collectOutputArtifactMetadataFromCache(ctx context.Context, executorInput *pipelinespec.ExecutorInput, cachedMLMDExecutionID int64, mlmd *metadata.Client) ([]*metadata.OutputArtifact, error) { - outputArtifacts, err := mlmd.GetOutputArtifactsByExecutionId(ctx, cachedMLMDExecutionID) - if err != nil { - return nil, fmt.Errorf("failed to get MLMDOutputArtifactsByName by executionId %v: %w", cachedMLMDExecutionID, err) - } - - // Register artifacts with MLMD. 
- registeredMLMDArtifacts := make([]*metadata.OutputArtifact, 0, len(executorInput.GetOutputs().GetArtifacts())) - for name, artifactList := range executorInput.GetOutputs().GetArtifacts() { - if len(artifactList.Artifacts) == 0 { - continue - } - artifact := artifactList.Artifacts[0] - outputArtifact, ok := outputArtifacts[name] - if !ok { - return nil, fmt.Errorf("unable to find artifact with name %v in mlmd output artifacts", name) - } - outputArtifact.Schema = artifact.GetType().GetInstanceSchema() - registeredMLMDArtifacts = append(registeredMLMDArtifacts, outputArtifact) - } - return registeredMLMDArtifacts, nil -} - -func reuseCachedOutputs(ctx context.Context, executorInput *pipelinespec.ExecutorInput, mlmd *metadata.Client, cachedMLMDExecutionID string) (*pipelinespec.ExecutorOutput, []*metadata.OutputArtifact, error) { - cachedMLMDExecutionIDInt64, err := strconv.ParseInt(cachedMLMDExecutionID, 10, 64) - if err != nil { - return nil, nil, fmt.Errorf("failure while transferring cachedMLMDExecutionID %s from string to int64: %w", cachedMLMDExecutionID, err) - } - execution, err := mlmd.GetExecution(ctx, cachedMLMDExecutionIDInt64) - if err != nil { - return nil, nil, fmt.Errorf("failure while getting execution of cachedMLMDExecutionID %v: %w", cachedMLMDExecutionIDInt64, err) - } - executorOutput := &pipelinespec.ExecutorOutput{ - Artifacts: map[string]*pipelinespec.ArtifactList{}, - } - _, outputs, err := execution.GetParameters() - if err != nil { - return nil, nil, fmt.Errorf("failed to collect output parameters from cache: %w", err) - } - executorOutput.ParameterValues = outputs - outputArtifacts, err := collectOutputArtifactMetadataFromCache(ctx, executorInput, cachedMLMDExecutionIDInt64, mlmd) - if err != nil { - return nil, nil, fmt.Errorf("failed collect output artifact metadata from cache: %w", err) - } - return executorOutput, outputArtifacts, nil -} - // getFingerPrint generates a fingerprint for caching. The PVC names are included in the fingerprint since it's assumed // PVCs have side effects (e.g. files written for tasks later on in the run) on the execution. If the PVC names are // different, the execution shouldn't be reused for the cache. 
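// Concretely (a sketch): two otherwise-identical executions mounting ["pvc-a"]
// and ["pvc-b"] hash to different fingerprints, so neither is a cache hit for
// the other; and because the names are sorted before hashing, ["a", "b"] and
// ["b", "a"] fingerprint identically.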
-func getFingerPrint(opts Options, executorInput *pipelinespec.ExecutorInput, cacheClient cacheutils.Client, pvcNames []string) (string, error) { +func getFingerPrint(opts common.Options, executorInput *pipelinespec.ExecutorInput, pvcNames []string) (string, error) { outputParametersTypeMap := make(map[string]string) for outputParamName, outputParamSpec := range opts.Component.GetOutputDefinitions().GetParameters() { outputParametersTypeMap[outputParamName] = outputParamSpec.GetParameterType().String() @@ -101,7 +52,7 @@ func getFingerPrint(opts Options, executorInput *pipelinespec.ExecutorInput, cac } sort.Strings(sortedPVCNames) - cacheKey, err := cacheClient.GenerateCacheKey( + cacheKey, err := cacheutils.GenerateCacheKey( executorInput.GetInputs(), executorInput.GetOutputs(), outputParametersTypeMap, @@ -112,53 +63,68 @@ func getFingerPrint(opts Options, executorInput *pipelinespec.ExecutorInput, cac if err != nil { return "", fmt.Errorf("failure while generating CacheKey: %w", err) } - fingerPrint, err := cacheClient.GenerateFingerPrint(cacheKey) + fingerPrint, err := cacheutils.GenerateFingerPrint(cacheKey) return fingerPrint, err } -func getFingerPrintsAndID(execution *Execution, opts *Options, cacheClient cacheutils.Client, pvcNames []string) (string, string, error) { - if !opts.CacheDisabled && execution.WillTrigger() && opts.Task.GetCachingOptions().GetEnableCache() { - glog.Infof("Task {%s} enables cache", opts.Task.GetTaskInfo().GetName()) - fingerPrint, err := getFingerPrint(*opts, execution.ExecutorInput, cacheClient, pvcNames) - if err != nil { - return "", "", fmt.Errorf("failure while getting fingerPrint: %w", err) - } - cachedMLMDExecutionID, err := cacheClient.GetExecutionCache(fingerPrint, "pipeline/"+opts.PipelineName, opts.Namespace) - if err != nil { - return "", "", fmt.Errorf("failure while getting executionCache: %w", err) - } - return fingerPrint, cachedMLMDExecutionID, nil - } else { - return "", "", nil +func getFingerPrintsAndID( + ctx context.Context, + execution *Execution, + kfpAPI kfpapi.API, + opts *common.Options, + pvcNames []string) (fingerprint string, task *apiv2beta1.PipelineTaskDetail, err error) { + + if opts.CacheDisabled || !execution.WillTrigger() || !opts.Task.GetCachingOptions().GetEnableCache() { + return "", nil, nil } -} -func createCache( - ctx context.Context, - execution *metadata.Execution, - opts *Options, - taskStartedTime int64, - fingerPrint string, - cacheClient cacheutils.Client, -) error { - id := execution.GetID() - if id == 0 { - return fmt.Errorf("failed to get id from createdExecution") + glog.Infof("Task {%s} enables cache", opts.Task.GetTaskInfo().GetName()) + fingerPrint, err := getFingerPrint(*opts, execution.ExecutorInput, pvcNames) + if err != nil { + return "", nil, fmt.Errorf("failure while getting fingerPrint: %w", err) + } + + predicates := []*apiv2beta1.Predicate{ + { + Operation: apiv2beta1.Predicate_EQUALS, + Key: "cache_fingerprint", + Value: &apiv2beta1.Predicate_StringValue{StringValue: fingerPrint}, + }, + { + Operation: apiv2beta1.Predicate_EQUALS, + Key: "status", + Value: &apiv2beta1.Predicate_IntValue{IntValue: int32(apiv2beta1.PipelineTaskDetail_SUCCEEDED)}, + }, } - task := &api.Task{ - // TODO how to differentiate between shared pipeline and namespaced pipeline - PipelineName: "pipeline/" + opts.PipelineName, - Namespace: opts.Namespace, - RunId: opts.RunID, - MlmdExecutionID: strconv.FormatInt(id, 10), - CreatedAt: timestamppb.New(time.Unix(taskStartedTime, 0)), - FinishedAt: 
timestamppb.New(time.Unix(time.Now().Unix(), 0)), - Fingerprint: fingerPrint, + + filter := &apiv2beta1.Filter{ + Predicates: predicates, + } + mo := protojson.MarshalOptions{ + UseProtoNames: true, + EmitUnpopulated: false, + } + filterJSON, err := mo.Marshal(filter) + if err != nil { + return "", nil, fmt.Errorf("failed to marshal filter: %v", err) } - err := cacheClient.CreateExecutionCache(ctx, task) + + glog.V(4).Infof("Looking for cached tasks with: filter=%s, namespace=%s", filterJSON, opts.Namespace) + tasks, err := kfpAPI.ListTasks(ctx, &apiv2beta1.ListTasksRequest{ + ParentFilter: &apiv2beta1.ListTasksRequest_Namespace{Namespace: opts.Namespace}, + Filter: string(filterJSON), + }) if err != nil { - return err + return "", nil, fmt.Errorf("failure while listing tasks: %w", err) + } + + if len(tasks.Tasks) == 0 { + glog.Infof("No cached tasks found for task {%s}", opts.Task.GetTaskInfo().GetName()) + return fingerPrint, nil, nil + } else if len(tasks.Tasks) > 1 { + glog.Infof("Found multiple cached tasks for task %s with fingerprint %s, the first one found will be used.", opts.Task.GetTaskInfo().GetName(), fingerPrint) } - glog.Infof("Created cache entry.") - return nil + + glog.V(4).Infof("Got a cache hit for task {%s}", opts.Task.GetTaskInfo().GetName()) + return fingerPrint, tasks.Tasks[0], nil } diff --git a/backend/src/v2/driver/common/common_test.go b/backend/src/v2/driver/common/common_test.go new file mode 100644 index 00000000000..a654267ef3d --- /dev/null +++ b/backend/src/v2/driver/common/common_test.go @@ -0,0 +1,100 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License.
+ +package common + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_isInputParameterChannel(t *testing.T) { + tests := []struct { + name string + input string + isValid bool + }{ + { + name: "wellformed pipeline channel should produce no errors", + input: "{{$.inputs.parameters['pipelinechannel--someParameterName']}}", + isValid: true, + }, + { + name: "pipeline channel index should have quotes", + input: "{{$.inputs.parameters[pipelinechannel--someParameterName]}}", + isValid: false, + }, + { + name: "plain text as pipelinechannel of parameter type is invalid", + input: "randomtext", + isValid: false, + }, + { + name: "inputs should be prefixed with $.", + input: "{{inputs.parameters['pipelinechannel--someParameterName']}}", + isValid: false, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + assert.Equal(t, IsInputParameterChannel(test.input), test.isValid) + }) + } +} + +func Test_extractInputParameterFromChannel(t *testing.T) { + tests := []struct { + name string + input string + expected string + wantErr bool + }{ + { + name: "standard parameter pipeline channel input", + input: "{{$.inputs.parameters['pipelinechannel--someParameterName']}}", + expected: "pipelinechannel--someParameterName", + wantErr: false, + }, + { + name: "a more complex parameter pipeline channel input", + input: "{{$.inputs.parameters['pipelinechannel--somePara-me_terName']}}", + expected: "pipelinechannel--somePara-me_terName", + wantErr: false, + }, + { + name: "invalid input should return err", + input: "invalidvalue", + wantErr: true, + }, + { + name: "invalid input should return err 2", + input: "pipelinechannel--somePara-me_terName", + wantErr: true, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + actual, err := extractInputParameterFromChannel(test.input) + if test.wantErr { + assert.NotNil(t, err) + } else { + assert.NoError(t, err) + assert.Equal(t, actual, test.expected) + } + }) + } +} diff --git a/backend/src/v2/driver/common/comon.go b/backend/src/v2/driver/common/comon.go new file mode 100644 index 00000000000..addc3f324fc --- /dev/null +++ b/backend/src/v2/driver/common/comon.go @@ -0,0 +1,206 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package common provides common utilities for the KFP v2 driver. 
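+// It holds the driver Options shared by the container, DAG, and root drivers, along with helpers for recognizing and parsing "pipelinechannel--" input parameter references.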
+package common + +import ( + "fmt" + "regexp" + "strings" + + "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" + apiv2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" + "github.com/kubeflow/pipelines/backend/src/common/util" + "github.com/kubeflow/pipelines/kubernetes_platform/go/kubernetesplatform" + "google.golang.org/protobuf/types/known/structpb" +) + +// Options contains the driver options +type Options struct { + // required, pipeline context name + PipelineName string + // required, KFP run ID + Run *apiv2beta1.Run + // required, Component spec + Component *pipelinespec.ComponentSpec + // required + ParentTask *apiv2beta1.PipelineTaskDetail + // required + ScopePath util.ScopePath + + // optional, iteration index. -1 means not an iteration. + IterationIndex int + + // optional, required only by root DAG driver + RuntimeConfig *pipelinespec.PipelineJob_RuntimeConfig + Namespace string + + // optional, required by non-root drivers + Task *pipelinespec.PipelineTaskSpec + + // optional, required only by container driver + Container *pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec + + // optional, allows specifying Kubernetes-specific executor config + KubernetesExecutorConfig *kubernetesplatform.KubernetesExecutorConfig + + // optional, required only if the {{$.pipeline_job_resource_name}} placeholder is used or the run uses a workspace + RunName string + // optional, required only if the {{$.pipeline_job_name}} placeholder is used + RunDisplayName string + PipelineLogLevel string + PublishLogs string + CacheDisabled bool + DriverType string + TaskName string + PodName string + PodUID string + MLPipelineTLSEnabled bool + CaCertPath string +} + +// Info provides information used for debugging +func (o Options) Info() string { + msg := fmt.Sprintf("pipelineName=%v, runID=%v", o.PipelineName, o.Run.GetRunId()) + if o.Task.GetTaskInfo().GetName() != "" { + msg += fmt.Sprintf(", taskDisplayName=%q", o.Task.GetTaskInfo().GetName()) + } + if o.TaskName != "" { + msg += fmt.Sprintf(", taskName=%q", o.TaskName) + } + if o.Task.GetComponentRef().GetName() != "" { + msg += fmt.Sprintf(", component=%q", o.Task.GetComponentRef().GetName()) + } + if o.ParentTask != nil { + msg += fmt.Sprintf(", dagExecutionID=%v", o.ParentTask.GetParentTaskId()) + } + if o.IterationIndex >= 0 { + msg += fmt.Sprintf(", iterationIndex=%v", o.IterationIndex) + } + if o.RuntimeConfig != nil { + msg += ", runtimeConfig" // this only means runtimeConfig is not empty + } + if o.Component.GetImplementation() != nil { + msg += ", componentSpec" // this only means componentSpec is not empty + } + if o.KubernetesExecutorConfig != nil { + msg += ", KubernetesExecutorConfig" // this only means KubernetesExecutorConfig is not empty + } + return msg +} + +const pipelineChannelPrefix = "pipelinechannel--" + +func IsLoopArgument(name string) bool { + // Remove prefix + nameWithoutPrefix := strings.TrimPrefix(name, pipelineChannelPrefix) + return strings.HasSuffix(nameWithoutPrefix, "loop-item") || strings.HasPrefix(nameWithoutPrefix, "loop-item") +} + +func IsRuntimeIterationTask(task *apiv2beta1.PipelineTaskDetail) bool { + return task.Type == apiv2beta1.PipelineTaskDetail_RUNTIME && task.TypeAttributes != nil && task.TypeAttributes.IterationIndex != nil +} + +// inputPipelineChannelPattern defines a regex pattern to match the content within single quotes +// an example input channel looks like "{{$.inputs.parameters['pipelinechannel--val']}}" +const inputPipelineChannelPattern =
`\$.inputs.parameters\['(.+?)'\]` + +func IsInputParameterChannel(inputChannel string) bool { + re := regexp.MustCompile(inputPipelineChannelPattern) + match := re.FindStringSubmatch(inputChannel) + if len(match) == 2 { + return true + } else { + // if len(match) > 2, then this is still incorrect because + // inputChannel should contain only one parameter channel input + return false + } +} + +// extractInputParameterFromChannel takes an inputChannel that adheres to +// inputPipelineChannelPattern and extracts the channel parameter name. +// For example, given an input channel of the form "{{$.inputs.parameters['pipelinechannel--val']}}" +// the channel parameter name "pipelinechannel--val" is returned. +func extractInputParameterFromChannel(inputChannel string) (string, error) { + re := regexp.MustCompile(inputPipelineChannelPattern) + match := re.FindStringSubmatch(inputChannel) + if len(match) > 1 { + extractedValue := match[1] + return extractedValue, nil + } else { + return "", fmt.Errorf("failed to extract input parameter from channel: %s", inputChannel) + } +} + +// InputParamConstant converts and returns value as a RuntimeValue +func InputParamConstant(value string) *pipelinespec.TaskInputsSpec_InputParameterSpec { + return &pipelinespec.TaskInputsSpec_InputParameterSpec{ + Kind: &pipelinespec.TaskInputsSpec_InputParameterSpec_RuntimeValue{ + RuntimeValue: &pipelinespec.ValueOrRuntimeParameter{ + Value: &pipelinespec.ValueOrRuntimeParameter_Constant{ + Constant: structpb.NewStringValue(value), + }, + }, + }, + } +} + +// InputParamComponent converts and returns value as a ComponentInputParameter +func InputParamComponent(value string) *pipelinespec.TaskInputsSpec_InputParameterSpec { + return &pipelinespec.TaskInputsSpec_InputParameterSpec{ + Kind: &pipelinespec.TaskInputsSpec_InputParameterSpec_ComponentInputParameter{ + ComponentInputParameter: value, + }, + } +} + +// InputParamTaskOutput converts and returns producerTask & outputParamKey +// as a TaskOutputParameter. +func InputParamTaskOutput(producerTask, outputParamKey string) *pipelinespec.TaskInputsSpec_InputParameterSpec { + return &pipelinespec.TaskInputsSpec_InputParameterSpec{ + Kind: &pipelinespec.TaskInputsSpec_InputParameterSpec_TaskOutputParameter{ + TaskOutputParameter: &pipelinespec.TaskInputsSpec_InputParameterSpec_TaskOutputParameterSpec{ + ProducerTask: producerTask, + OutputParameterKey: outputParamKey, + }, + }, + } +} + +var paramPattern = regexp.MustCompile(`{{\$.inputs.parameters\['([^']+)'\]}}`) + +// ParsePipelineParam takes a string and returns (isMatch, isPipelineChannel, paramName) +func ParsePipelineParam(s string) (bool, bool, string) { + paramName, ok := extractParameterName(s) + if !ok { + return false, false, "" + } + return true, isPipelineChannel(paramName), paramName +} + +// extractParameterName extracts the inner value inside the brackets ['...'] +// e.g.
returns "pipelinechannel--cpu_limit" from "{{$.inputs.parameters['pipelinechannel--cpu_limit']}}" +func extractParameterName(s string) (string, bool) { + matches := paramPattern.FindStringSubmatch(s) + if len(matches) > 1 { + return matches[1], true + } + return "", false +} + +// isPipelineChannel checks if a parameter name follows the "pipelinechannel--" prefix convention +func isPipelineChannel(name string) bool { + return strings.HasPrefix(name, "pipelinechannel--") +} diff --git a/backend/src/v2/driver/container.go b/backend/src/v2/driver/container.go index 7bb98f49ab3..f40179f137c 100644 --- a/backend/src/v2/driver/container.go +++ b/backend/src/v2/driver/container.go @@ -1,17 +1,3 @@ -// Copyright 2025 The Kubeflow Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - package driver import ( @@ -23,66 +9,124 @@ import ( "github.com/golang/glog" "github.com/google/uuid" "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" - "github.com/kubeflow/pipelines/backend/src/v2/cacheutils" + apiV2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" + "github.com/kubeflow/pipelines/backend/src/common/util" + "github.com/kubeflow/pipelines/backend/src/v2/client_manager" + "github.com/kubeflow/pipelines/backend/src/v2/config" + "github.com/kubeflow/pipelines/backend/src/v2/driver/common" + "github.com/kubeflow/pipelines/backend/src/v2/driver/resolver" "github.com/kubeflow/pipelines/backend/src/v2/expression" - "github.com/kubeflow/pipelines/backend/src/v2/metadata" - pb "github.com/kubeflow/pipelines/third_party/ml-metadata/go/ml_metadata" "google.golang.org/protobuf/encoding/protojson" "google.golang.org/protobuf/types/known/structpb" + "google.golang.org/protobuf/types/known/timestamppb" ) -func validateContainer(opts Options) (err error) { +func Container(ctx context.Context, opts common.Options, clientManager client_manager.ClientManagerInterface) (execution *Execution, driverErr error) { defer func() { - if err != nil { - err = fmt.Errorf("invalid container driver args: %w", err) + if driverErr != nil { + driverErr = fmt.Errorf("driver.Container(%s) failed: %w", opts.Info(), driverErr) } }() - if opts.Container == nil { - return fmt.Errorf("container spec is required") + b, driverErr := json.Marshal(opts) + if driverErr != nil { + return nil, driverErr } - return validateNonRoot(opts) -} - -func Container(ctx context.Context, opts Options, mlmd *metadata.Client, cacheClient cacheutils.Client) (execution *Execution, err error) { - defer func() { - if err != nil { - err = fmt.Errorf("driver.Container(%s) failed: %w", opts.info(), err) - } - }() - b, _ := json.Marshal(opts) glog.V(4).Info("Container opts: ", string(b)) - err = validateContainer(opts) - if err != nil { - return nil, err + + if clientManager == nil { + return nil, fmt.Errorf("kfpAPI client is nil") } + if opts.TaskName == "" { + return nil, fmt.Errorf("task name flag is required for Container") + } + if opts.ParentTask == nil { + return nil, fmt.Errorf("parent task is required for Runtime 
Task") + } + var iterationIndex *int if opts.IterationIndex >= 0 { - index := opts.IterationIndex - iterationIndex = &index + idx := opts.IterationIndex + iterationIndex = &idx } - // TODO(Bobgy): there's no need to pass any parameters, because pipeline - // and pipeline run context have been created by root DAG driver. - pipeline, err := mlmd.GetPipeline(ctx, opts.PipelineName, opts.RunID, "", "", "", "") - if err != nil { - return nil, err + + expr, driverErr := expression.New() + if driverErr != nil { + return nil, driverErr } - dag, err := mlmd.GetDAG(ctx, opts.DAGExecutionID) - if err != nil { - return nil, err + + parentTask, driverErr := clientManager.KFPAPIClient().GetTask(ctx, &apiV2beta1.GetTaskRequest{TaskId: opts.ParentTask.GetTaskId()}) + if driverErr != nil { + return nil, driverErr } - glog.Infof("parent DAG: %+v", dag.Execution) - expr, err := expression.New() - if err != nil { - return nil, err + opts.ParentTask = parentTask + + taskToCreate := &apiV2beta1.PipelineTaskDetail{ + Name: opts.TaskName, + DisplayName: opts.Task.GetTaskInfo().GetName(), + RunId: opts.Run.GetRunId(), + Type: apiV2beta1.PipelineTaskDetail_RUNTIME, + State: apiV2beta1.PipelineTaskDetail_RUNNING, + ParentTaskId: util.StringPointer(opts.ParentTask.TaskId), + ScopePath: opts.ScopePath.StringPath(), + StartTime: timestamppb.Now(), + CreateTime: timestamppb.Now(), + Pods: []*apiV2beta1.PipelineTaskDetail_TaskPod{ + { + Name: opts.PodName, + Uid: opts.PodUID, + Type: apiV2beta1.PipelineTaskDetail_DRIVER, + }, + }, } - inputs, err := resolveInputs(ctx, dag, iterationIndex, pipeline, opts, mlmd, expr) - if err != nil { - return nil, err + + // Ensure we capture and propagate any errors. + defer func() { + if driverErr != nil { + taskToCreate.State = apiV2beta1.PipelineTaskDetail_FAILED + taskToCreate.EndTime = timestamppb.Now() + taskToCreate.StatusMetadata = &apiV2beta1.PipelineTaskDetail_StatusMetadata{ + Message: driverErr.Error(), + } + // We encountered an error in driver before we got the chance to create the task. + if taskToCreate.TaskId == "" { + _, err := clientManager.KFPAPIClient().CreateTask(ctx, &apiV2beta1.CreateTaskRequest{Task: taskToCreate}) + if err != nil { + glog.Errorf("Failed to Create task %s: %v", taskToCreate.Name, err) + } + } else { + _, err := clientManager.KFPAPIClient().UpdateTask(ctx, &apiV2beta1.UpdateTaskRequest{Task: taskToCreate}) + if err != nil { + glog.Errorf("Failed to update task %s: %v", taskToCreate.Name, err) + } + } + } + + fullView := apiV2beta1.GetRunRequest_FULL + refreshedRun, getRunErr := clientManager.KFPAPIClient().GetRun(ctx, &apiV2beta1.GetRunRequest{RunId: opts.Run.GetRunId(), View: &fullView}) + if getRunErr != nil { + glog.Errorf("failed to refresh run: %w", getRunErr) + return + } + opts.Run = refreshedRun + err := clientManager.KFPAPIClient().UpdateStatuses(ctx, opts.Run, opts.ScopePath.GetPipelineSpecStruct(), taskToCreate) + if err != nil { + glog.Errorf("Failed to update statuses: %v", err) + return + } + }() + + // Resolve inputs + inputs, _, driverErr := resolver.ResolveInputs(ctx, opts) + if driverErr != nil { + return nil, driverErr } - executorInput := &pipelinespec.ExecutorInput{ - Inputs: inputs, + // Convert inputs to executor inputs. 
+ executorInput, driverErr := pipelineTaskInputsToExecutorInputs(inputs) + if driverErr != nil { + return nil, fmt.Errorf("failed to convert inputs to executor inputs: %w", driverErr) } + execution = &Execution{ExecutorInput: executorInput} condition := opts.Task.GetTriggerPolicy().GetCondition() if condition != "" { @@ -96,13 +140,13 @@ func Container(ctx context.Context, opts Options, mlmd *metadata.Client, cacheCl // When the container image is a dummy image, there is no launcher for this // task. This happens when this task is created to implement a // Kubernetes-specific configuration, i.e., there is no user container to - // run. It publishes execution details to mlmd in driver and takes care of - // caching, which are usually done in launcher. We also skip creating the + // run. It creates a task in the driver and takes care of + // caching, which is usually done in launcher. We also skip creating the // podspecpatch in these cases. _, isKubernetesPlatformOp := dummyImages[opts.Container.Image] if isKubernetesPlatformOp { // To be consistent with other artifacts, the driver registers log - // artifacts to MLMD and the launcher publishes them to the object + // artifacts to KFP and the launcher publishes them to the object // store. This pattern does not work for kubernetesPlatformOps because // they have no launcher. There's no point in registering logs that // won't be published. Consequently, when we know we're dealing with @@ -111,47 +155,35 @@ func Container(ctx context.Context, opts Options, mlmd *metadata.Client, cacheCl opts.PublishLogs = "false" } - if execution.WillTrigger() { - executorInput.Outputs = provisionOutputs( - pipeline.GetPipelineRoot(), - opts.TaskName, - opts.Component.GetOutputDefinitions(), - uuid.NewString(), - opts.PublishLogs, - ) + // If this is an iteration runtime task, set the iteration index. + if iterationIndex != nil { + taskToCreate.TypeAttributes = &apiV2beta1.PipelineTaskDetail_TypeAttributes{IterationIndex: util.Int64Pointer(int64(*iterationIndex))} } - ecfg, err := metadata.GenerateExecutionConfig(executorInput) - if err != nil { - return execution, err - } - ecfg.TaskName = opts.TaskName - ecfg.DisplayName = opts.Task.GetTaskInfo().GetName() - ecfg.ExecutionType = metadata.ContainerExecutionTypeName - ecfg.ParentDagID = dag.Execution.GetID() - ecfg.IterationIndex = iterationIndex - ecfg.NotTriggered = !execution.WillTrigger() - + // Handle Kubernetes-specific tasks such as pvc-creation or pvc-deletion if isKubernetesPlatformOp { - return execution, kubernetesPlatformOps(ctx, mlmd, cacheClient, execution, ecfg, &opts) + return execution, kubernetesPlatformOps(ctx, clientManager, execution, taskToCreate, &opts) } - var inputParams map[string]*structpb.Value - + var inputParams []*apiV2beta1.PipelineTaskDetail_InputOutputs_IOParameter if opts.KubernetesExecutorConfig != nil { - inputParams, _, err = dag.Execution.GetParameters() - if err != nil { - return nil, fmt.Errorf("failed to fetch input parameters from execution: %w", err) - } + inputParams = parentTask.GetInputs().GetParameters() } + // Generate a fingerprint and check if we have a cache hit. + var fingerPrint string + var cachedTask *apiV2beta1.PipelineTaskDetail if !opts.CacheDisabled { - // Generate fingerprint and MLMD ID for cache + // Generate fingerprint // Start by getting the names of the PVCs that need to be mounted.
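// Both explicitly mounted PVCs and, for runs that use a workspace, the workspace PVC are included, since either can carry state that later tasks read.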
- pvcNames := []string{} + var pvcNames []string if opts.KubernetesExecutorConfig != nil && opts.KubernetesExecutorConfig.GetPvcMount() != nil { - _, volumes, err := makeVolumeMountPatch(ctx, opts, opts.KubernetesExecutorConfig.GetPvcMount(), - dag, pipeline, mlmd, inputParams) + _, volumes, err := makeVolumeMountPatch( + opts, opts.KubernetesExecutorConfig.GetPvcMount(), + inputParams) if err != nil { return nil, fmt.Errorf("failed to extract volume mount info while generating fingerprint: %w", err) } @@ -169,76 +201,138 @@ func Container(ctx context.Context, opts Options, mlmd *metadata.Client, cacheCl pvcNames = append(pvcNames, GetWorkspacePVCName(opts.RunName)) } - fingerPrint, cachedMLMDExecutionID, err := getFingerPrintsAndID(execution, &opts, cacheClient, pvcNames) - if err != nil { - return execution, err + fingerPrint, cachedTask, driverErr = getFingerPrintsAndID(ctx, execution, clientManager.KFPAPIClient(), &opts, pvcNames) + if driverErr != nil { + return execution, driverErr } - ecfg.CachedMLMDExecutionID = cachedMLMDExecutionID - ecfg.FingerPrint = fingerPrint + taskToCreate.CacheFingerprint = fingerPrint } - // TODO(Bobgy): change execution state to pending, because this is driver, execution hasn't started. - createdExecution, err := mlmd.CreateExecution(ctx, pipeline, ecfg) - if err != nil { - return execution, err - } - glog.Infof("Created execution: %s", createdExecution) - execution.ID = createdExecution.GetID() - if !execution.WillTrigger() { - return execution, nil - } - - // Use cache and skip launcher if all contions met: + // Use cache and skip PVC creation if all conditions are met: // (1) Cache is enabled globally // (2) Cache is enabled for the task - // (3) CachedMLMDExecutionID is non-empty, which means a cache entry exists - cached := false - execution.Cached = &cached + // (3) We had a cache hit for this Task + execution.Cached = util.BoolPointer(false) if !opts.CacheDisabled { - if opts.Task.GetCachingOptions().GetEnableCache() && ecfg.CachedMLMDExecutionID != "" { - executorOutput, outputArtifacts, err := reuseCachedOutputs(ctx, execution.ExecutorInput, mlmd, ecfg.CachedMLMDExecutionID) - if err != nil { - return execution, err + if opts.Task.GetCachingOptions().GetEnableCache() && cachedTask != nil { + taskToCreate.State = apiV2beta1.PipelineTaskDetail_CACHED + taskToCreate.Outputs = cachedTask.Outputs + *execution.Cached = true + createdTask, createErr := clientManager.KFPAPIClient().CreateTask(ctx, &apiV2beta1.CreateTaskRequest{ + Task: taskToCreate, + }) + if createErr != nil { + return execution, fmt.Errorf("failed to create task: %w", createErr) } - // TODO(Bobgy): upload output artifacts. - // TODO(Bobgy): when adding artifacts, we will need execution.pipeline to be non-nil, because we need - // to publish output artifacts to the context too. - if err := mlmd.PublishExecution(ctx, createdExecution, executorOutput.GetParameterValues(), outputArtifacts, pb.Execution_CACHED); err != nil { - return execution, fmt.Errorf("failed to publish cached execution: %w", err) + + // Artifacts are not embedded in tasks like parameters; we need to create separate ArtifactTasks for each output.
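+ // Each ArtifactTask is a link record tying an existing artifact ID to the newly created cached task and its run, so the cached outputs surface on this task without copying the artifacts themselves.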
+ var artifactTasks []*apiV2beta1.ArtifactTask + for _, cachedOutput := range cachedTask.Outputs.Artifacts { + for _, artifact := range cachedOutput.Artifacts { + artifactTasks = append(artifactTasks, &apiV2beta1.ArtifactTask{ + ArtifactId: artifact.GetArtifactId(), + RunId: createdTask.RunId, + TaskId: createdTask.TaskId, + Type: cachedOutput.GetType(), + Producer: cachedOutput.GetProducer(), + Key: cachedOutput.ArtifactKey, + }) + } } - glog.Infof("Use cache for task %s", opts.Task.GetTaskInfo().GetName()) - *execution.Cached = true + _, err := clientManager.KFPAPIClient().CreateArtifactTasks(ctx, &apiV2beta1.CreateArtifactTasksBulkRequest{ + ArtifactTasks: artifactTasks, + }) + if err != nil { + return execution, fmt.Errorf("failed to create artifact tasks: %w", err) + } + execution.TaskID = createdTask.TaskId + glog.Infof("Cache hit for task %s", opts.TaskName) return execution, nil } } else { glog.Info("Cache disabled globally at the server level.") } - taskConfig := &TaskConfig{} + taskToCreate, driverErr = handleInputTaskParametersCreation(inputs.Parameters, taskToCreate) + if driverErr != nil { + return execution, driverErr + } + + if !execution.WillTrigger() { + taskToCreate.State = apiV2beta1.PipelineTaskDetail_SKIPPED + } + + glog.Infof("Creating task %s in namespace %s", opts.TaskName, opts.Namespace) + createdTask, driverErr := clientManager.KFPAPIClient().CreateTask(ctx, &apiV2beta1.CreateTaskRequest{Task: taskToCreate}) + if driverErr != nil { + return execution, driverErr } + execution.TaskID = createdTask.TaskId + + // Create ArtifactTasks for each Artifact Input. + driverErr = handleInputTaskArtifactsCreation(ctx, opts, inputs.Artifacts, createdTask, clientManager.KFPAPIClient()) + if driverErr != nil { + return execution, driverErr + } + + // If this Task is a condition branch and the condition was not met, skip it. + if !execution.WillTrigger() { + return execution, nil + } - podSpec, err := initPodSpecPatch( + // Determine the pipeline root with the pipeline run context. + // If a user sets a pipeline root in the runtime config, use that. + // Otherwise, we use the default pipeline root from the launcher config map. + // If none is set, we use the hardcoded default. + pipelineRoot, driverErr := config.GetPipelineRootWithPipelineRunContext( + ctx, + opts.PipelineName, + opts.Namespace, + clientManager.K8sClient(), + opts.Run) + if driverErr != nil { + return execution, fmt.Errorf("failed to get pipeline root: %w", driverErr) + } + + // Provision Outputs in ExecutorInput + if execution.WillTrigger() { + executorInput.Outputs = provisionOutputs( + pipelineRoot, + opts.TaskName, + opts.Component.GetOutputDefinitions(), + uuid.NewString(), + opts.PublishLogs, + ) + } + + // Generate pod spec patch.
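+ // The patch carries the resolved executor input and task/run identifiers into the user pod; the --executor_input argument is overwritten further below once placeholders are resolved.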
+ taskConfig := &TaskConfig{} + podSpec, driverErr := initPodSpecPatch( opts.Container, opts.Component, executorInput, - execution.ID, + execution.TaskID, + parentTask.GetTaskId(), opts.PipelineName, - opts.RunID, + opts.Run.GetRunId(), opts.RunName, opts.PipelineLogLevel, opts.PublishLogs, strconv.FormatBool(opts.CacheDisabled), taskConfig, + fingerPrint, + iterationIndex, + opts.TaskName, opts.MLPipelineTLSEnabled, - opts.MLMDTLSEnabled, opts.CaCertPath, ) - if err != nil { - return execution, err + if driverErr != nil { + return execution, driverErr } if opts.KubernetesExecutorConfig != nil { - err = extendPodSpecPatch(ctx, podSpec, opts, dag, pipeline, mlmd, inputParams, taskConfig) - if err != nil { - return execution, err + driverErr = extendPodSpecPatch(ctx, podSpec, opts, inputParams, taskConfig) + if driverErr != nil { + return execution, driverErr } } @@ -270,36 +364,192 @@ func Container(ctx context.Context, opts Options, mlmd *metadata.Client, cacheCl } } - ecfg.InputParameters = executorInput.Inputs.ParameterValues - // Overwrite the --executor_input argument in the podSpec container command with the updated executorInput executorInputJSON, err := protojson.Marshal(executorInput) if err != nil { return execution, fmt.Errorf("JSON marshaling executor input: %w", err) } - for index, container := range podSpec.Containers { if container.Name == "main" { cmd := container.Command for i := 0; i < len(cmd)-1; i++ { if cmd[i] == "--executor_input" { podSpec.Containers[index].Command[i+1] = string(executorInputJSON) - break } } - break } } - execution.ExecutorInput = executorInput } - podSpecPatchBytes, err := json.Marshal(podSpec) - if err != nil { - return execution, fmt.Errorf("JSON marshaling pod spec patch: %w", err) + podSpecPatchBytes, driverErr := json.Marshal(podSpec) + if driverErr != nil { + return execution, fmt.Errorf("JSON marshaling pod spec patch: %w", driverErr) } execution.PodSpecPatch = string(podSpecPatchBytes) return execution, nil } + +func pipelineTaskInputsToExecutorInputs(inputMetadata *resolver.InputMetadata) (*pipelinespec.ExecutorInput, error) { + parameters := make(map[string]*structpb.Value) + artifacts := make(map[string]*pipelinespec.ArtifactList) + for _, p := range inputMetadata.Parameters { + if p.ParameterIO.GetValue() == nil { + return nil, fmt.Errorf("parameter %s has no value", p.Key) + } + if p.ParameterIO.GetType() == apiV2beta1.IOType_ITERATOR_INPUT { + // first check if p.Key is already present in parameters + if _, ok := parameters[p.Key]; ok { + // if present, then append to the existing value + err := addValueToStructPBList(parameters[p.Key], p.ParameterIO.GetValue()) + if err != nil { + return nil, fmt.Errorf("failed to append value to existing parameter %s: %w", p.Key, err) + } + } else { + parameters[p.Key] = resolver.ToListValue([]*structpb.Value{ + p.ParameterIO.GetValue(), + }) + } + } else { + parameters[p.Key] = p.ParameterIO.GetValue() + } + } + for _, a := range inputMetadata.Artifacts { + artifactsList, err := convertArtifactsToArtifactList(a.ArtifactIO.GetArtifacts()) + if err != nil { + return nil, err + } + artifacts[a.Key] = artifactsList + } + executorInput := &pipelinespec.ExecutorInput{ + Inputs: &pipelinespec.ExecutorInput_Inputs{ + ParameterValues: parameters, + Artifacts: artifacts, + }, + } + return executorInput, nil +} + +func convertArtifactsToArtifactList(artifacts []*apiV2beta1.Artifact) (*pipelinespec.ArtifactList, error) { + if len(artifacts) == 0 { + return &pipelinespec.ArtifactList{}, nil + } + + // Check if 
all artifacts are metrics + allMetrics := true + for _, artifact := range artifacts { + if artifact.Type != apiV2beta1.Artifact_Metric { + allMetrics = false + break + } + } + + // If all are metrics and there are multiple, merge into ONE RuntimeArtifact. + // This is because the KFP SDK expects a single RuntimeArtifact for metrics + // for a given key, and it expects the key/value data to be present in the + // metadata. + if allMetrics && len(artifacts) > 1 { + // Merge all metric artifacts into one RuntimeArtifact with combined metadata + mergedMetadata := make(map[string]*structpb.Value) + var firstName string + var firstURI string + var firstArtifactID string + + for i, artifact := range artifacts { + // Use first artifact's common fields + if i == 0 { + firstName = artifact.GetName() + firstURI = artifact.GetUri() + firstArtifactID = artifact.GetArtifactId() + } + + // Merge metadata fields: each artifact's metadata contains the metric key/value + if artifact.GetMetadata() != nil { + for key, value := range artifact.GetMetadata() { + mergedMetadata[key] = value + } + } + + // Also include the NumberValue in metadata if present + if artifact.NumberValue != nil { + // The artifact Name is the metric key (e.g., "accuracy", "precision") + metricKey := artifact.GetName() + if metricKey != "" { + mergedMetadata[metricKey] = structpb.NewNumberValue(*artifact.NumberValue) + } + } + } + + // Create single RuntimeArtifact with merged metadata + mergedRuntimeArtifact := &pipelinespec.RuntimeArtifact{ + Name: firstName, + ArtifactId: firstArtifactID, + Type: &pipelinespec.ArtifactTypeSchema{ + Kind: &pipelinespec.ArtifactTypeSchema_SchemaTitle{ + SchemaTitle: apiV2beta1.Artifact_Metric.String(), + }, + }, + Metadata: &structpb.Struct{ + Fields: mergedMetadata, + }, + } + if firstURI != "" { + mergedRuntimeArtifact.Uri = firstURI + } + + return &pipelinespec.ArtifactList{ + Artifacts: []*pipelinespec.RuntimeArtifact{mergedRuntimeArtifact}, + }, nil + } + + // Non-metrics or single artifact: convert each artifact to RuntimeArtifact (existing behavior) + var runtimeArtifacts []*pipelinespec.RuntimeArtifact + for _, artifact := range artifacts { + runtimeArtifact, err := convertArtifactToRuntimeArtifact(artifact) + if err != nil { + return nil, err + } + runtimeArtifacts = append(runtimeArtifacts, runtimeArtifact) + } + return &pipelinespec.ArtifactList{ + Artifacts: runtimeArtifacts, + }, nil +} + +func convertArtifactToRuntimeArtifact( + artifact *apiV2beta1.Artifact, +) (*pipelinespec.RuntimeArtifact, error) { + if artifact.GetName() == "" && artifact.GetUri() == "" { + return nil, fmt.Errorf("artifact name and uri cannot both be empty") + } + runtimeArtifact := &pipelinespec.RuntimeArtifact{ + Name: artifact.GetName(), + ArtifactId: artifact.GetArtifactId(), + Type: &pipelinespec.ArtifactTypeSchema{ + Kind: &pipelinespec.ArtifactTypeSchema_SchemaTitle{ + SchemaTitle: artifact.Type.String(), + }, + }, + } + if artifact.GetUri() != "" { + runtimeArtifact.Uri = artifact.GetUri() + } + if artifact.GetMetadata() != nil { + runtimeArtifact.Metadata = &structpb.Struct{ + Fields: artifact.GetMetadata(), + } + } + return runtimeArtifact, nil +} + +func addValueToStructPBList(list *structpb.Value, value *structpb.Value) error { + switch v := list.GetKind().(type) { + case *structpb.Value_ListValue: + v.ListValue.Values = append(v.ListValue.Values, value) + return nil + default: + return fmt.Errorf("value of type %T cannot be appended to", v) + } +} diff --git a/backend/src/v2/driver/container_test.go
b/backend/src/v2/driver/container_test.go new file mode 100644 index 00000000000..d103b5ec0ab --- /dev/null +++ b/backend/src/v2/driver/container_test.go @@ -0,0 +1,280 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package driver + +import ( + "testing" + + apiV2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" + "github.com/stretchr/testify/assert" + "google.golang.org/protobuf/types/known/structpb" +) + +// TestConvertArtifactsToArtifactList_MultipleMetrics tests that multiple metric +// artifacts are merged into a single RuntimeArtifact with combined metadata +func TestConvertArtifactsToArtifactList_MultipleMetrics(t *testing.T) { + accuracy := 0.95 + precision := 0.87 + recall := 0.91 + + artifacts := []*apiV2beta1.Artifact{ + { + ArtifactId: "artifact-1", + Name: "accuracy", + Type: apiV2beta1.Artifact_Metric, + NumberValue: &accuracy, + Metadata: map[string]*structpb.Value{ + "accuracy": structpb.NewNumberValue(accuracy), + }, + }, + { + ArtifactId: "artifact-2", + Name: "precision", + Type: apiV2beta1.Artifact_Metric, + NumberValue: &precision, + Metadata: map[string]*structpb.Value{ + "precision": structpb.NewNumberValue(precision), + }, + }, + { + ArtifactId: "artifact-3", + Name: "recall", + Type: apiV2beta1.Artifact_Metric, + NumberValue: &recall, + Metadata: map[string]*structpb.Value{ + "recall": structpb.NewNumberValue(recall), + }, + }, + } + + artifactList, err := convertArtifactsToArtifactList(artifacts) + assert.NoError(t, err) + assert.NotNil(t, artifactList) + + // Should have ONE RuntimeArtifact with merged metadata + assert.Equal(t, 1, len(artifactList.Artifacts), "Should merge multiple metrics into one RuntimeArtifact") + + runtimeArtifact := artifactList.Artifacts[0] + assert.NotNil(t, runtimeArtifact.Metadata, "Merged artifact should have metadata") + + // Verify all metrics are in the metadata + metadata := runtimeArtifact.Metadata.Fields + assert.Equal(t, 3, len(metadata), "Metadata should contain all three metrics") + + // Verify each metric value + assert.NotNil(t, metadata["accuracy"]) + assert.Equal(t, accuracy, metadata["accuracy"].GetNumberValue()) + + assert.NotNil(t, metadata["precision"]) + assert.Equal(t, precision, metadata["precision"].GetNumberValue()) + + assert.NotNil(t, metadata["recall"]) + assert.Equal(t, recall, metadata["recall"].GetNumberValue()) +} + +// TestConvertArtifactsToArtifactList_SingleMetric tests that a single metric +// artifact is converted without merging (normal behavior) +func TestConvertArtifactsToArtifactList_SingleMetric(t *testing.T) { + accuracy := 0.95 + + artifacts := []*apiV2beta1.Artifact{ + { + ArtifactId: "artifact-1", + Name: "accuracy", + Type: apiV2beta1.Artifact_Metric, + NumberValue: &accuracy, + Metadata: map[string]*structpb.Value{ + "accuracy": structpb.NewNumberValue(accuracy), + }, + }, + } + + artifactList, err := convertArtifactsToArtifactList(artifacts) + assert.NoError(t, err) + assert.NotNil(t, artifactList) + + // Single 
metric should NOT be merged (normal conversion) + assert.Equal(t, 1, len(artifactList.Artifacts), "Should have one RuntimeArtifact") + + runtimeArtifact := artifactList.Artifacts[0] + assert.Equal(t, "accuracy", runtimeArtifact.Name) + assert.Equal(t, "Metric", runtimeArtifact.Type.GetSchemaTitle()) +} + +// TestConvertArtifactsToArtifactList_NonMetrics tests that non-metric artifacts +// are not merged and follow normal conversion +func TestConvertArtifactsToArtifactList_NonMetrics(t *testing.T) { + uri1 := "s3://bucket/dataset1" + uri2 := "s3://bucket/dataset2" + + artifacts := []*apiV2beta1.Artifact{ + { + ArtifactId: "artifact-1", + Name: "dataset1", + Type: apiV2beta1.Artifact_Dataset, + Uri: &uri1, + }, + { + ArtifactId: "artifact-2", + Name: "dataset2", + Type: apiV2beta1.Artifact_Dataset, + Uri: &uri2, + }, + } + + artifactList, err := convertArtifactsToArtifactList(artifacts) + assert.NoError(t, err) + assert.NotNil(t, artifactList) + + // Non-metrics should NOT be merged - each gets its own RuntimeArtifact + assert.Equal(t, 2, len(artifactList.Artifacts), "Non-metrics should not be merged") + + // Verify each artifact independently + names := make(map[string]bool) + for _, artifact := range artifactList.Artifacts { + names[artifact.Name] = true + assert.Equal(t, "Dataset", artifact.Type.GetSchemaTitle()) + } + + assert.True(t, names["dataset1"], "Should have dataset1") + assert.True(t, names["dataset2"], "Should have dataset2") +} + +// TestConvertArtifactsToArtifactList_MixedTypes tests that when artifacts +// contain mixed types (not all metrics), they are not merged +func TestConvertArtifactsToArtifactList_MixedTypes(t *testing.T) { + accuracy := 0.95 + uri := "s3://bucket/model" + + artifacts := []*apiV2beta1.Artifact{ + { + ArtifactId: "artifact-1", + Name: "accuracy", + Type: apiV2beta1.Artifact_Metric, + NumberValue: &accuracy, + }, + { + ArtifactId: "artifact-2", + Name: "model", + Type: apiV2beta1.Artifact_Model, + Uri: &uri, + }, + } + + artifactList, err := convertArtifactsToArtifactList(artifacts) + assert.NoError(t, err) + assert.NotNil(t, artifactList) + + // Mixed types should NOT be merged + assert.Equal(t, 2, len(artifactList.Artifacts), "Mixed types should not be merged") +} + +// TestConvertArtifactsToArtifactList_EmptyList tests that empty artifact list +// is handled correctly +func TestConvertArtifactsToArtifactList_EmptyList(t *testing.T) { + artifacts := []*apiV2beta1.Artifact{} + + artifactList, err := convertArtifactsToArtifactList(artifacts) + assert.NoError(t, err) + assert.NotNil(t, artifactList) + assert.Equal(t, 0, len(artifactList.Artifacts), "Empty list should return empty ArtifactList") +} + +// TestConvertArtifactsToArtifactList_MetricsWithURIAndMetadata tests that +// merged metrics preserve URI from first artifact +func TestConvertArtifactsToArtifactList_MetricsWithURIAndMetadata(t *testing.T) { + accuracy := 0.95 + precision := 0.87 + uri := "s3://bucket/metrics.json" + + artifacts := []*apiV2beta1.Artifact{ + { + ArtifactId: "artifact-1", + Name: "accuracy", + Type: apiV2beta1.Artifact_Metric, + Uri: &uri, + NumberValue: &accuracy, + Metadata: map[string]*structpb.Value{ + "accuracy": structpb.NewNumberValue(accuracy), + }, + }, + { + ArtifactId: "artifact-2", + Name: "precision", + Type: apiV2beta1.Artifact_Metric, + NumberValue: &precision, + Metadata: map[string]*structpb.Value{ + "precision": structpb.NewNumberValue(precision), + }, + }, + } + + artifactList, err := convertArtifactsToArtifactList(artifacts) + assert.NoError(t, err) + 
assert.NotNil(t, artifactList) + + // Verify merged artifact has URI from first artifact + assert.Equal(t, 1, len(artifactList.Artifacts)) + mergedArtifact := artifactList.Artifacts[0] + assert.Equal(t, uri, mergedArtifact.Uri, "Should preserve URI from first artifact") + + // Verify metadata contains both metrics + metadata := mergedArtifact.Metadata.Fields + assert.Equal(t, 2, len(metadata)) + assert.NotNil(t, metadata["accuracy"]) + assert.NotNil(t, metadata["precision"]) +} + +// TestConvertArtifactsToArtifactList_MetricsNumberValueInMetadata tests that +// NumberValue is properly included in merged metadata for multiple metrics +func TestConvertArtifactsToArtifactList_MetricsNumberValueInMetadata(t *testing.T) { + accuracy := 0.95 + precision := 0.87 + + // Test with multiple metrics where one has no metadata field + artifacts := []*apiV2beta1.Artifact{ + { + ArtifactId: "artifact-1", + Name: "accuracy", + Type: apiV2beta1.Artifact_Metric, + NumberValue: &accuracy, + // No metadata field - NumberValue should still be included in merged metadata + }, + { + ArtifactId: "artifact-2", + Name: "precision", + Type: apiV2beta1.Artifact_Metric, + NumberValue: &precision, + Metadata: map[string]*structpb.Value{ + "precision": structpb.NewNumberValue(precision), + }, + }, + } + + artifactList, err := convertArtifactsToArtifactList(artifacts) + assert.NoError(t, err) + assert.NotNil(t, artifactList) + + // Should merge into one RuntimeArtifact + assert.Equal(t, 1, len(artifactList.Artifacts)) + runtimeArtifact := artifactList.Artifacts[0] + assert.NotNil(t, runtimeArtifact.Metadata) + + // Verify both metrics are in metadata (including the one without explicit metadata field) + metadata := runtimeArtifact.Metadata.Fields + assert.Equal(t, 2, len(metadata), "Should have both metrics in metadata") + assert.NotNil(t, metadata["accuracy"], "Should have accuracy from NumberValue") + assert.NotNil(t, metadata["precision"], "Should have precision from metadata") +} diff --git a/backend/src/v2/driver/dag.go b/backend/src/v2/driver/dag.go index 362fac66f7f..09151827798 100644 --- a/backend/src/v2/driver/dag.go +++ b/backend/src/v2/driver/dag.go @@ -18,67 +18,60 @@ import ( "context" "encoding/json" "fmt" + "strings" "github.com/golang/glog" - "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" + gc "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" + "github.com/kubeflow/pipelines/backend/src/common/util" + "github.com/kubeflow/pipelines/backend/src/v2/client_manager" + "github.com/kubeflow/pipelines/backend/src/v2/driver/common" + "github.com/kubeflow/pipelines/backend/src/v2/driver/resolver" "github.com/kubeflow/pipelines/backend/src/v2/expression" - "github.com/kubeflow/pipelines/backend/src/v2/metadata" - "google.golang.org/protobuf/types/known/structpb" + "google.golang.org/protobuf/types/known/timestamppb" ) -func validateDAG(opts Options) (err error) { +func DAG(ctx context.Context, opts common.Options, clientManager client_manager.ClientManagerInterface) (execution *Execution, err error) { defer func() { if err != nil { - err = fmt.Errorf("invalid DAG driver args: %w", err) + err = fmt.Errorf("driver.DAG(%s) failed: %w", opts.Info(), err) } }() - if opts.Container != nil { - return fmt.Errorf("container spec is unnecessary") - } - return validateNonRoot(opts) -} -func DAG(ctx context.Context, opts Options, mlmd *metadata.Client) (execution *Execution, err error) { - defer func() { - if err != nil { - err = fmt.Errorf("driver.DAG(%s) failed: %w", opts.info(), err) - } - }() - 
b, _ := json.Marshal(opts) - glog.V(4).Info("DAG opts: ", string(b)) - err = validateDAG(opts) + b, err := json.Marshal(opts) if err != nil { return nil, err } - var iterationIndex *int - if opts.IterationIndex >= 0 { - index := opts.IterationIndex - iterationIndex = &index - } - // TODO(Bobgy): there's no need to pass any parameters, because pipeline - // and pipeline run context have been created by root DAG driver. - pipeline, err := mlmd.GetPipeline(ctx, opts.PipelineName, opts.RunID, "", "", "", "") - if err != nil { + + glog.V(4).Info("DAG opts: ", string(b)) + if err = validateDAG(opts); err != nil { return nil, err } - dag, err := mlmd.GetDAG(ctx, opts.DAGExecutionID) - if err != nil { - return nil, err + + if clientManager == nil { + return nil, fmt.Errorf("ClientManager is nil") } - glog.Infof("parent DAG: %+v", dag.Execution) + expr, err := expression.New() if err != nil { return nil, err } - inputs, err := resolveInputs(ctx, dag, iterationIndex, pipeline, opts, mlmd, expr) + + // Resolve inputs; the iteration count, if any, is used below to determine this task's type. + inputs, iterationCount, err := resolver.ResolveInputs(ctx, opts) if err != nil { return nil, err } - executorInput := &pipelinespec.ExecutorInput{ - Inputs: inputs, + + executorInput, err := pipelineTaskInputsToExecutorInputs(inputs) + if err != nil { + return nil, fmt.Errorf("failed to convert inputs to executor inputs: %w", err) } + + // TODO(HumairAK) this doesn't seem to be used in the dag case (or root); + // consider removing it. ExecutorInput is only required by Runtimes. glog.Infof("executorInput value: %+v", executorInput) execution = &Execution{ExecutorInput: executorInput} + condition := opts.Task.GetTriggerPolicy().GetCondition() if condition != "" { willTrigger, err := expr.Condition(executorInput, condition) @@ -87,94 +80,66 @@ func DAG(ctx context.Context, opts Options, mlmd *metadata.Client) (execution *E } execution.Condition = &willTrigger } - ecfg, err := metadata.GenerateExecutionConfig(executorInput) - if err != nil { - return execution, err + + taskToCreate := &gc.PipelineTaskDetail{ + Name: opts.TaskName, + DisplayName: opts.Task.GetTaskInfo().GetName(), + RunId: opts.Run.GetRunId(), + // Default to DAG + Type: gc.PipelineTaskDetail_DAG, + State: gc.PipelineTaskDetail_RUNNING, + ScopePath: opts.ScopePath.StringPath(), + StartTime: timestamppb.Now(), + CreateTime: timestamppb.Now(), + Pods: []*gc.PipelineTaskDetail_TaskPod{ + { + Name: opts.PodName, + Uid: opts.PodUID, + Type: gc.PipelineTaskDetail_DRIVER, + }, + }, } - // Set task name to display name if not specified. This is the case of - // specialty tasks such as OneOfs and ParallelFors where there are not - // explicit dag tasks defined in the pipeline, but rather generated at - // compile time and assigned a display name. - taskName := opts.TaskName - if taskName == "" { - taskName = opts.Task.GetTaskInfo().GetName() + // Determine type of DAG task. + // In the future the KFP SDK should add a Task Type enum to the task Info proto + // to assist with inferring type. For now, we infer the type based on attribute + // heuristics.
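+ // For example, a fanned-out "for-loop-2" task with a resolved iteration count becomes a LOOP, a task guarded by a trigger-policy condition becomes a CONDITION_BRANCH, a "condition-2"-style name (but not "condition-branch...") becomes a CONDITION, and anything else remains a plain DAG.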
+ switch { + case iterationCount != nil: + count := int64(*iterationCount) + taskToCreate.TypeAttributes = &gc.PipelineTaskDetail_TypeAttributes{IterationCount: &count} + taskToCreate.Type = gc.PipelineTaskDetail_LOOP + taskToCreate.DisplayName = "Loop" + execution.IterationCount = util.IntPointer(int(count)) + case condition != "": + taskToCreate.Type = gc.PipelineTaskDetail_CONDITION_BRANCH + taskToCreate.DisplayName = "Condition Branch" + case strings.HasPrefix(opts.TaskName, "condition") && !strings.HasPrefix(opts.TaskName, "condition-branch"): + taskToCreate.Type = gc.PipelineTaskDetail_CONDITION + taskToCreate.DisplayName = "Condition" + default: + taskToCreate.Type = gc.PipelineTaskDetail_DAG } - ecfg.TaskName = taskName - ecfg.DisplayName = opts.Task.GetTaskInfo().GetName() - ecfg.ExecutionType = metadata.DagExecutionTypeName - ecfg.ParentDagID = dag.Execution.GetID() - ecfg.IterationIndex = iterationIndex - ecfg.NotTriggered = !execution.WillTrigger() - - // Handle writing output parameters to MLMD. - ecfg.OutputParameters = opts.Component.GetDag().GetOutputs().GetParameters() - glog.V(4).Info("outputParameters: ", ecfg.OutputParameters) - - // Handle writing output artifacts to MLMD. - ecfg.OutputArtifacts = opts.Component.GetDag().GetOutputs().GetArtifacts() - glog.V(4).Info("outputArtifacts: ", ecfg.OutputArtifacts) - - totalDagTasks := len(opts.Component.GetDag().GetTasks()) - ecfg.TotalDagTasks = &totalDagTasks - glog.V(4).Info("totalDagTasks: ", *ecfg.TotalDagTasks) - - if opts.Task.GetArtifactIterator() != nil { - return execution, fmt.Errorf("ArtifactIterator is not implemented") + + if opts.ParentTask.GetTaskId() != "" { + taskToCreate.ParentTaskId = util.StringPointer(opts.ParentTask.GetTaskId()) } - isIterator := opts.Task.GetParameterIterator() != nil && opts.IterationIndex < 0 - // Fan out iterations - if execution.WillTrigger() && isIterator { - iterator := opts.Task.GetParameterIterator() - report := func(err error) error { - return fmt.Errorf("iterating on item input %q failed: %w", iterator.GetItemInput(), err) - } - // Check the items type of parameterIterator: - // It can be "inputParameter" or "Raw" - var value *structpb.Value - switch iterator.GetItems().GetKind().(type) { - case *pipelinespec.ParameterIteratorSpec_ItemsSpec_InputParameter: - var ok bool - value, ok = executorInput.GetInputs().GetParameterValues()[iterator.GetItems().GetInputParameter()] - if !ok { - return execution, report(fmt.Errorf("cannot find input parameter")) - } - case *pipelinespec.ParameterIteratorSpec_ItemsSpec_Raw: - value_raw := iterator.GetItems().GetRaw() - var unmarshalled_raw interface{} - err = json.Unmarshal([]byte(value_raw), &unmarshalled_raw) - if err != nil { - return execution, fmt.Errorf("error unmarshall raw string: %q", err) - } - value, err = structpb.NewValue(unmarshalled_raw) - if err != nil { - return execution, fmt.Errorf("error converting unmarshalled raw string into protobuf Value type: %q", err) - } - // Add the raw input to the executor input - execution.ExecutorInput.Inputs.ParameterValues[iterator.GetItemInput()] = value - default: - return execution, fmt.Errorf("cannot find parameter iterator") - } - items, err := getItems(value) - if err != nil { - return execution, report(err) - } - count := len(items) - ecfg.IterationCount = &count - execution.IterationCount = &count + taskToCreate, err = handleInputTaskParametersCreation(inputs.Parameters, taskToCreate) + if err != nil { + return execution, err } + glog.Infof("Creating task: %+v", taskToCreate) + 
createdTask, err := clientManager.KFPAPIClient().CreateTask(ctx, &gc.CreateTaskRequest{Task: taskToCreate}) + if err != nil { + return execution, err + } + glog.Infof("Created task: %+v", createdTask) + execution.TaskID = createdTask.TaskId - glog.V(4).Info("pipeline: ", pipeline) - b, _ = json.Marshal(*ecfg) - glog.V(4).Info("ecfg: ", string(b)) - glog.V(4).Infof("dag: %v", dag) - - // TODO(Bobgy): change execution state to pending, because this is driver, execution hasn't started. - createdExecution, err := mlmd.CreateExecution(ctx, pipeline, ecfg) + err = handleInputTaskArtifactsCreation(ctx, opts, inputs.Artifacts, createdTask, clientManager.KFPAPIClient()) if err != nil { return execution, err } - glog.Infof("Created execution: %s", createdExecution) - execution.ID = createdExecution.GetID() + return execution, nil } diff --git a/backend/src/v2/driver/dag_test.go b/backend/src/v2/driver/dag_test.go new file mode 100644 index 00000000000..8dfbf1a9705 --- /dev/null +++ b/backend/src/v2/driver/dag_test.go @@ -0,0 +1,1129 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package driver + +import ( + "context" + "fmt" + "testing" + + "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" + apiv2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" + "github.com/kubeflow/pipelines/backend/src/common/util" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "google.golang.org/protobuf/types/known/structpb" + v1 "k8s.io/api/core/v1" + "k8s.io/apimachinery/pkg/util/json" +) + +func TestRootDagComponentInputs(t *testing.T) { + runtimeConfig := &pipelinespec.PipelineJob_RuntimeConfig{ + ParameterValues: map[string]*structpb.Value{ + "string_input": structpb.NewStringValue("test-input1"), + "number_input": structpb.NewNumberValue(42.5), + "bool_input": structpb.NewBoolValue(true), + "null_input": structpb.NewNullValue(), + "list_input": structpb.NewListValue(&structpb.ListValue{Values: []*structpb.Value{ + structpb.NewStringValue("value1"), + structpb.NewNumberValue(42), + structpb.NewBoolValue(true), + }}), + "map_input": structpb.NewStructValue(&structpb.Struct{ + Fields: map[string]*structpb.Value{ + "key1": structpb.NewStringValue("value1"), + "key2": structpb.NewNumberValue(42), + "key3": structpb.NewListValue(&structpb.ListValue{ + Values: []*structpb.Value{ + structpb.NewStringValue("nested1"), + structpb.NewStringValue("nested2"), + }, + }), + }, + }), + }, + } + + tc := NewTestContextWithRootExecuted(t, runtimeConfig, "test_data/taskOutputArtifact_test.yaml") + task := tc.RootTask + require.NotNil(t, task.Inputs) + require.NotEmpty(t, task.Inputs.Parameters) + + // Verify parameter values + paramMap := make(map[string]*structpb.Value) + for _, param := range task.Inputs.Parameters { + paramMap[param.GetParameterKey()] = param.Value + } + + assert.Equal(t, "test-input1", paramMap["string_input"].GetStringValue()) + assert.Equal(t, 42.5, paramMap["number_input"].GetNumberValue()) + 
assert.Equal(t, true, paramMap["bool_input"].GetBoolValue()) + assert.NotNil(t, paramMap["null_input"].GetNullValue()) + assert.Len(t, paramMap["list_input"].GetListValue().Values, 3) + assert.NotNil(t, paramMap["map_input"].GetStructValue()) + assert.Len(t, paramMap["map_input"].GetStructValue().Fields, 3) +} + +func TestLoopArtifactPassing(t *testing.T) { + tc := NewTestContextWithRootExecuted( + t, + &pipelinespec.PipelineJob_RuntimeConfig{}, + "test_data/loop_collected_raw_Iterator.yaml", + ) + parentTask := tc.RootTask + + // Run Dag on the First Task + secondaryPipelineExecution, secondaryPipelineTask := tc.RunDagDriver("secondary-pipeline", parentTask) + require.Nil(t, secondaryPipelineExecution.ExecutorInput.Outputs) + require.Equal(t, apiv2beta1.PipelineTaskDetail_RUNNING, secondaryPipelineTask.State) + + // Refresh Parent Task - The parent task should be the secondary pipeline task for "create-dataset" + parentTask = secondaryPipelineTask + + // Now we'll run the subtasks in the secondary pipeline, one of which is a loop of 3 iterations + + // Run the Downstream Task that will use the output artifact + createDataSetExecution, _ := tc.RunContainerDriver("create-dataset", parentTask, nil, false) + // Expect the output artifact to be created + require.NotNil(t, createDataSetExecution.ExecutorInput.Outputs) + require.NotNil(t, createDataSetExecution.ExecutorInput.Outputs.Artifacts) + require.Equal(t, 1, len(createDataSetExecution.ExecutorInput.Outputs.Artifacts)) + require.Contains(t, createDataSetExecution.ExecutorInput.Outputs.Artifacts, "output_dataset") + require.Equal(t, "output_dataset", createDataSetExecution.ExecutorInput.Outputs.Artifacts["output_dataset"].GetArtifacts()[0].Name) + + // Run the actual launcher with mocks to simulate component execution + launcherExec := tc.RunLauncher(createDataSetExecution, map[string][]byte{"/tmp/kfp_outputs/output_metadata.json": []byte("{}")}, true) + // Get the artifact ID that was created + require.Len(t, launcherExec.Task.Outputs.Artifacts, 1) + createDataSetOutputArtifactID := launcherExec.Task.Outputs.Artifacts[0].Artifacts[0].ArtifactId + + // Run the Loop Task - note that parentTask for for-loop-2 remains as secondary-pipeline + loopExecution, loopTask := tc.RunDagDriver("for-loop-2", parentTask) + require.Nil(t, secondaryPipelineExecution.ExecutorInput.Outputs) + require.NotZero(t, len(loopTask.Inputs.Parameters)) + // Expect loop task to have resolved its input parameter + require.Equal(t, "pipelinechannel--loop-item-param-1", loopTask.Inputs.Parameters[0].ParameterKey) + // Expect the artifact output of create-dataset as input to for-loop-2 + require.Equal(t, len(loopTask.Inputs.Artifacts), 1) + + // The parent task should be "for-loop-2" for the iterations at first depth + parentTask = loopTask + + // Perform the iteration calls + for index, paramID := range []string{"1", "2", "3"} { + // Run the "process-dataset" Container Task with iteration index + processExecution, _ := tc.RunContainerDriver("process-dataset", parentTask, util.Int64Pointer(int64(index)), false) + require.NotNil(t, processExecution.ExecutorInput.Outputs) + require.NotNil(t, processExecution.ExecutorInput.Inputs.Artifacts["input_dataset"]) + require.Equal(t, 1, len(processExecution.ExecutorInput.Inputs.Artifacts["input_dataset"].GetArtifacts())) + require.Equal(t, processExecution.ExecutorInput.Inputs.Artifacts["input_dataset"].GetArtifacts()[0].ArtifactId, createDataSetOutputArtifactID) + require.NotNil(t, 
processExecution.ExecutorInput.Inputs.ParameterValues["model_id_in"]) + require.Equal(t, processExecution.ExecutorInput.Inputs.ParameterValues["model_id_in"].GetStringValue(), paramID) + + // Run the actual launcher for process-dataset + // The launcher will automatically propagate outputs up the DAG hierarchy + processLauncherExec := tc.RunLauncher(processExecution, map[string][]byte{"/tmp/kfp_outputs/output_metadata.json": []byte("{}")}, true) + // Get the artifact ID that was created + require.Len(t, processLauncherExec.Task.Outputs.Artifacts, 1) + processDataSetArtifactID := processLauncherExec.Task.Outputs.Artifacts[0].Artifacts[0].ArtifactId + + // Verify that the launcher automatically propagated the output artifact to the for-loop-2 task + loopTask, err := tc.ClientManager.KFPAPIClient().GetTask(context.Background(), &apiv2beta1.GetTaskRequest{TaskId: loopExecution.TaskID}) + require.NoError(t, err) + require.NotNil(t, loopTask.Outputs) + require.Equal(t, len(loopTask.Outputs.Artifacts), index+1, "Loop task should have %d artifacts after iteration %d", index+1, index) + + // Verify that the launcher also propagated the output artifact up to the secondary-pipeline task + secondaryPipelineTask, err = tc.ClientManager.KFPAPIClient().GetTask(context.Background(), &apiv2beta1.GetTaskRequest{TaskId: secondaryPipelineExecution.TaskID}) + require.NoError(t, err) + require.NotNil(t, secondaryPipelineTask.Outputs) + require.Equal(t, index+1, len(secondaryPipelineTask.Outputs.Artifacts), "Secondary pipeline task should have %d artifacts after iteration %d", index+1, index) + + // Run the next iteration component + analyzeExecution, _ := tc.RunContainerDriver("analyze-artifact", parentTask, util.Int64Pointer(int64(index)), false) + require.NotNil(t, createDataSetExecution.ExecutorInput.Outputs) + require.NotNil(t, analyzeExecution.ExecutorInput.Outputs) + require.NotNil(t, analyzeExecution.ExecutorInput.Inputs.Artifacts["analyze_artifact_input"]) + require.Equal(t, 1, len(analyzeExecution.ExecutorInput.Inputs.Artifacts["analyze_artifact_input"].GetArtifacts())) + require.Equal(t, analyzeExecution.ExecutorInput.Inputs.Artifacts["analyze_artifact_input"].GetArtifacts()[0].ArtifactId, processDataSetArtifactID) + + // Run the actual launcher for analyze-artifact + _ = tc.RunLauncher(analyzeExecution, map[string][]byte{"/tmp/kfp_outputs/output_metadata.json": []byte("{}")}, true) + } + + tasks, err := tc.ClientManager.KFPAPIClient().ListTasks(context.Background(), &apiv2beta1.ListTasksRequest{ + ParentFilter: &apiv2beta1.ListTasksRequest_ParentId{ParentId: loopExecution.TaskID}, + }) + require.NoError(t, err) + require.NotNil(t, tasks) + // Expect 3 tasks for analyze-artifact + 3 tasks for process-dataset + require.Equal(t, 6, len(tasks.Tasks)) + + // Expect the 3 artifacts from process-task to have been collected by the for-loop-2 task + forLoopTask, err := tc.ClientManager.KFPAPIClient().GetTask(context.Background(), &apiv2beta1.GetTaskRequest{TaskId: loopExecution.TaskID}) + require.NoError(t, err) + require.Equal(t, 3, len(forLoopTask.Outputs.Artifacts)) + + // Verify producer attribution for loop task artifacts + // The producer should be the immediate child task from the loop's perspective (process-dataset), + // not the original runtime task that created the artifact + for i, artifactIO := range forLoopTask.Outputs.Artifacts { + require.NotNil(t, artifactIO.Producer, "Loop task artifact %d should have a producer", i) + require.Equal(t, "process-dataset", artifactIO.Producer.TaskName, + "Loop 
task artifact %d producer should be 'process-dataset' (immediate child)", i) + require.NotNil(t, artifactIO.Producer.Iteration, + "Loop task artifact %d should have iteration index preserved", i) + } + + // Run "analyze_artifact_list" in "secondary_pipeline" + // Move up a parent + parentTask = secondaryPipelineTask + tc.ExitDag() + + analyzeArtifactListExecution, analyzeArtifactListTask := tc.RunContainerDriver("analyze-artifact-list", parentTask, nil, false) + require.NotNil(t, analyzeArtifactListExecution.ExecutorInput.Outputs) + require.NotNil(t, analyzeArtifactListExecution.ExecutorInput.Inputs.Artifacts["artifact_list_input"]) + require.Equal(t, 3, len(analyzeArtifactListExecution.ExecutorInput.Inputs.Artifacts["artifact_list_input"].GetArtifacts())) + + artifactListLauncher := tc.RunLauncher(analyzeArtifactListExecution, map[string][]byte{"/tmp/kfp_outputs/output_metadata.json": []byte("{}")}, true) + require.NotNil(t, artifactListLauncher.Task) + _, err = tc.ClientManager.KFPAPIClient().GetTask(context.Background(), &apiv2beta1.GetTaskRequest{TaskId: analyzeArtifactListTask.TaskId}) + require.NoError(t, err) + // Primary Pipeline tests + + // Expect the 3 artifacts from process-task to have been collected by the secondary-pipeline task + secondaryPipelineTask, err = tc.ClientManager.KFPAPIClient().GetTask(context.Background(), &apiv2beta1.GetTaskRequest{TaskId: secondaryPipelineExecution.TaskID}) + require.NoError(t, err) + require.Equal(t, 3, len(secondaryPipelineTask.Outputs.Artifacts)) + + // Verify producer attribution for secondary-pipeline task artifacts + // The producer should be the immediate child from secondary-pipeline's perspective (for-loop-2), + // NOT the original runtime task (process-dataset) that created the artifact + // This demonstrates that producer attribution "resets" at each propagation level + for i, artifactIO := range secondaryPipelineTask.Outputs.Artifacts { + require.NotNil(t, artifactIO.Producer, "Secondary pipeline artifact %d should have a producer", i) + require.Equal(t, "for-loop-2", artifactIO.Producer.TaskName, + "Secondary pipeline artifact %d producer should be 'for-loop-2' (immediate child from secondary-pipeline's perspective)", i) + require.Nil(t, artifactIO.Producer.Iteration, + "Secondary pipeline artifact %d should not have iteration index preserved from process-dataset", i) + } + + // Move up a parent + parentTask = tc.RootTask + tc.ExitDag() + + // Not to be confused with the "analyze-artifact-list" task in secondary pipeline, + // this is the "analyze-artifact-list" task in the primary pipeline + analyzeArtifactListOuterExecution, _ := tc.RunContainerDriver("analyze-artifact-list", parentTask, nil, false) + require.NotNil(t, analyzeArtifactListExecution.ExecutorInput.Outputs) + require.NotNil(t, analyzeArtifactListOuterExecution.ExecutorInput.Outputs) + require.NotNil(t, analyzeArtifactListOuterExecution.ExecutorInput.Inputs.Artifacts["artifact_list_input"]) + require.Equal(t, 3, len(analyzeArtifactListOuterExecution.ExecutorInput.Inputs.Artifacts["artifact_list_input"].GetArtifacts())) + + _ = tc.RunLauncher(analyzeArtifactListOuterExecution, map[string][]byte{"/tmp/kfp_outputs/output_metadata.json": []byte("{}")}, true) + + // Refresh Run so it has the new tasks + tc.RefreshRun() + + // primary_pipeline() x 1 (root) + // secondary_pipeline() x 1 (dag) + // create_dataset() x 1 (runtime) + // for_loop_1() x 1 (loop) + // process_dataset() x 3 (runtime) + // analyze_artifact() x 3 (runtime) + // analyze_artifact_list() x 1 (runtime) 
+ // analyze_artifact_list() x 1 (runtime, primary pipeline) + require.Equal(t, 12, len(tc.Run.Tasks)) +} + +// TestParameterInputIterator tests the parameter input iterator +// and parameter collection from the output of a task in a loop +func TestParameterInputIterator(t *testing.T) { + tc := NewTestContextWithRootExecuted( + t, + &pipelinespec.PipelineJob_RuntimeConfig{}, + "test_data/loop_collected_InputParameter_Iterator.yaml", + ) + // Execute full pipeline + parentTask := tc.RootTask + _, secondaryPipelineTask := tc.RunDagDriver("secondary-pipeline", parentTask) + parentTask = secondaryPipelineTask + + splitIDsExecution, _ := tc.RunContainerDriver("split-ids", parentTask, nil, false) + + // Get the output parameter file path + outputParamPath := splitIDsExecution.ExecutorInput.Outputs.Parameters["Output"].OutputFile + + splitIDsLauncher := tc.RunLauncher(splitIDsExecution, map[string][]byte{ + "/tmp/kfp_outputs/output_metadata.json": []byte("{}"), + outputParamPath: []byte(`["1", "2", "3"]`), + }, true) + loopExecution, loopTask := tc.RunDagDriver("for-loop-1", parentTask) + parentTask = loopTask + require.NotNil(t, loopExecution) + require.NotNil(t, loopExecution.IterationCount) + require.Equal(t, 3, *loopExecution.IterationCount) + + for index := range []string{"1", "2", "3"} { + index64 := util.Int64Pointer(int64(index)) + createFileExecution, _ := tc.RunContainerDriver("create-file", parentTask, index64, false) + _ = tc.RunLauncher(createFileExecution, map[string][]byte{ + "/tmp/kfp_outputs/output_metadata.json": []byte("{}"), + }, true) + + // Run next task + readSingleFileExecution, _ := tc.RunContainerDriver("read-single-file", parentTask, index64, false) + readSingleFileOutputPath := readSingleFileExecution.ExecutorInput.Outputs.Parameters["Output"].OutputFile + _ = tc.RunLauncher(readSingleFileExecution, map[string][]byte{ + "/tmp/kfp_outputs/output_metadata.json": []byte("{}"), + readSingleFileOutputPath: []byte(fmt.Sprintf("file-%d", index)), + }, true) + _ = splitIDsLauncher + } + + tc.ExitDag() + parentTask = secondaryPipelineTask + + // Check what parameters the for-loop-1 task has after all iterations + refreshedLoopTask, err := tc.ClientManager.KFPAPIClient().GetTask(context.Background(), &apiv2beta1.GetTaskRequest{TaskId: loopTask.TaskId}) + require.NoError(t, err) + require.NotNil(t, refreshedLoopTask.Outputs) + require.Equal(t, 3, len(refreshedLoopTask.Outputs.Parameters)) + + // Verify producer attribution for loop task parameters + // The producer should be the immediate child task from the loop's perspective (read-single-file), + // not the original runtime task that created the parameter + for i, param := range refreshedLoopTask.Outputs.Parameters { + require.NotNil(t, param.Producer, "Loop task parameter %d should have a producer", i) + require.Equal(t, "read-single-file", param.Producer.TaskName, + "Loop task parameter %d producer should be 'read-single-file' (immediate child)", i) + require.NotNil(t, param.Producer.Iteration, + "Loop task parameter %d should have iteration index preserved", i) + } + + readValuesExecution, _ := tc.RunContainerDriver("read-values", parentTask, nil, false) + readValuesOutputPath := readValuesExecution.ExecutorInput.Outputs.Parameters["Output"].OutputFile + + _ = tc.RunLauncher(readValuesExecution, map[string][]byte{ + "/tmp/kfp_outputs/output_metadata.json": []byte("{}"), + readValuesOutputPath: []byte("files read"), + }, true) + + tc.ExitDag() + parentTask = tc.RootTask + + readValuesExecution2, _ := tc.RunContainerDriver("read-values",
parentTask, nil, false) + readValuesOutputPath2 := readValuesExecution2.ExecutorInput.Outputs.Parameters["Output"].OutputFile + + _ = tc.RunLauncher(readValuesExecution2, map[string][]byte{ + "/tmp/kfp_outputs/output_metadata.json": []byte("{}"), + readValuesOutputPath2: []byte("files read"), + }, true) + + task, err := tc.ClientManager.KFPAPIClient().GetTask(context.Background(), &apiv2beta1.GetTaskRequest{TaskId: secondaryPipelineTask.GetTaskId()}) + require.NoError(t, err) + require.NotNil(t, task.Outputs) + require.Equal(t, 3, len(task.Outputs.Parameters)) + + // Verify producer attribution for secondary-pipeline task parameters + // The producer should be the immediate child from secondary-pipeline's perspective (for-loop-1), + // NOT the original runtime task (read-single-file) that created the parameter + // This demonstrates that producer attribution "resets" at each propagation level + var collectOutputs []string + for i, params := range task.Outputs.Parameters { + collectOutputs = append(collectOutputs, params.GetValue().GetStringValue()) + require.Equal(t, apiv2beta1.IOType_ITERATOR_OUTPUT, params.GetType()) + + // Verify producer is the immediate child task (for-loop-1) + require.NotNil(t, params.Producer, "Secondary pipeline parameter %d should have a producer", i) + require.Equal(t, "for-loop-1", params.Producer.TaskName, + "Secondary pipeline parameter %d producer should be 'for-loop-1' (immediate child from secondary-pipeline's perspective)", i) + require.Nil(t, params.Producer.Iteration, + "Secondary pipeline parameter %d shouldn't propagate read-single-file", i) + } + require.Equal(t, []string{"file-0", "file-1", "file-2"}, collectOutputs) +} + +func TestNestedDag(t *testing.T) { + tc := NewTestContextWithRootExecuted(t, &pipelinespec.PipelineJob_RuntimeConfig{}, "test_data/nested_naming_conflicts.yaml") + parentTask := tc.RootTask + + aExecution, _ := tc.RunContainerDriver("a", parentTask, nil, false) + aLauncher := tc.RunLauncher(aExecution, map[string][]byte{"/tmp/kfp_outputs/output_metadata.json": []byte("{}")}, true) + _ = aLauncher.Task + + _, pipelineBTask := tc.RunDagDriver("pipeline-b", parentTask) + parentTask = pipelineBTask + + nestedAExecution, _ := tc.RunContainerDriver("a", parentTask, nil, false) + nestedALauncher := tc.RunLauncher(nestedAExecution, map[string][]byte{"/tmp/kfp_outputs/output_metadata.json": []byte("{}")}, true) + _ = nestedALauncher.Task + + nestedBExecution, _ := tc.RunContainerDriver("b", parentTask, nil, false) + nestedBLauncher := tc.RunLauncher(nestedBExecution, map[string][]byte{"/tmp/kfp_outputs/output_metadata.json": []byte("{}")}, true) + _ = nestedBLauncher.Task + + _, pipelineCTask := tc.RunDagDriver("pipeline-c", parentTask) + parentTask = pipelineCTask + + nestedNestedAExecution, _ := tc.RunContainerDriver("a", parentTask, nil, false) + nestedNestedALauncher := tc.RunLauncher(nestedNestedAExecution, map[string][]byte{"/tmp/kfp_outputs/output_metadata.json": []byte("{}")}, true) + _ = nestedNestedALauncher.Task + + nestedNestedBExecution, _ := tc.RunContainerDriver("b", parentTask, nil, false) + nestedNestedBLauncher := tc.RunLauncher(nestedNestedBExecution, map[string][]byte{"/tmp/kfp_outputs/output_metadata.json": []byte("{}")}, true) + nestedNestedBTask := nestedNestedBLauncher.Task + + cExecution, _ := tc.RunContainerDriver("c", parentTask, nil, false) + + // Run the launcher for task c which will create outputs and propagate them up + cLauncherExec := tc.RunLauncher(cExecution, 
map[string][]byte{"/tmp/kfp_outputs/output_metadata.json": []byte("{}")}, true) + cTask := cLauncherExec.Task + + tc.ExitDag() + tc.ExitDag() + parentTask = tc.RootTask + + _, _ = tc.RunContainerDriver("verify", parentTask, nil, true) + + var err error + + // Get the artifact ID from cTask's output + require.NotNil(t, cTask.Outputs) + require.Equal(t, 1, len(cTask.Outputs.Artifacts)) + cTaskArtifactID := cTask.Outputs.Artifacts[0].GetArtifacts()[0].GetArtifactId() + + // Confirm that the artifact passed to the "verify" task came from task c + // by checking that pipeline-b has the same artifact ID in its outputs (propagated from c) + pipelineBTask, err = tc.ClientManager.KFPAPIClient().GetTask(context.Background(), &apiv2beta1.GetTaskRequest{TaskId: pipelineBTask.GetTaskId()}) + require.NoError(t, err) + require.NotNil(t, pipelineBTask.Outputs) + require.Equal(t, 1, len(pipelineBTask.Outputs.Artifacts)) + require.Equal(t, cTaskArtifactID, pipelineBTask.Outputs.Artifacts[0].GetArtifacts()[0].GetArtifactId(), + "pipeline-b's output artifact should be the same artifact produced by task c") + + // Get the artifact ID from nestedNestedBTask's output + require.NotNil(t, nestedNestedBTask.Outputs) + require.Equal(t, 1, len(nestedNestedBTask.Outputs.Artifacts)) + nestedNestedBArtifactID := nestedNestedBTask.Outputs.Artifacts[0].GetArtifacts()[0].GetArtifactId() + + // Confirm that the artifact passed to cTask came from nestedNestedBTask, + // i.e., the b() task that ran in pipeline-c and not in pipeline-b + cTask, err = tc.ClientManager.KFPAPIClient().GetTask(context.Background(), &apiv2beta1.GetTaskRequest{TaskId: cTask.GetTaskId()}) + require.NoError(t, err) + require.NotNil(t, cTask.Inputs) + require.Equal(t, 1, len(cTask.Inputs.Artifacts)) + require.Equal(t, nestedNestedBArtifactID, cTask.Inputs.Artifacts[0].GetArtifacts()[0].GetArtifactId(), + "cTask's input artifact should be from nested-nested-b, not nested-b") +} + +func TestParameterTaskOutput(t *testing.T) { + tc := NewTestContextWithRootExecuted(t, &pipelinespec.PipelineJob_RuntimeConfig{}, "test_data/taskOutputParameter_test.yaml") + parentTask := tc.RootTask + + // Run driver and launcher for create-dataset + cdExecution, _ := tc.RunContainerDriver("create-dataset", parentTask, nil, false) + cdOutputPath := cdExecution.ExecutorInput.Outputs.Parameters["output_parameter_path"].OutputFile + _ = tc.RunLauncher(cdExecution, map[string][]byte{ + "/tmp/kfp_outputs/output_metadata.json": []byte("{}"), + cdOutputPath: []byte("10.0"), + }, true) + + // Run driver and launcher for process-dataset + pdExecution, _ := tc.RunContainerDriver("process-dataset", parentTask, nil, false) + pdOutputPath := pdExecution.ExecutorInput.Outputs.Parameters["output_int"].OutputFile + _ = tc.RunLauncher(pdExecution, map[string][]byte{ + "/tmp/kfp_outputs/output_metadata.json": []byte("{}"), + pdOutputPath: []byte("100"), + }, true) + + // Run driver and launcher for analyze-artifact + analyzeArtifactExecution, _ := tc.RunContainerDriver("analyze-artifact", parentTask, nil, false) + analyzeOutputPath := analyzeArtifactExecution.ExecutorInput.Outputs.Parameters["output_opinion"].OutputFile + _ = tc.RunLauncher(analyzeArtifactExecution, map[string][]byte{ + "/tmp/kfp_outputs/output_metadata.json": []byte("{}"), + analyzeOutputPath: []byte("true"), + }, true) +} + +func TestOneOf(t *testing.T) { + tc := NewTestContextWithRootExecuted(t, &pipelinespec.PipelineJob_RuntimeConfig{}, "test_data/oneof_simple.yaml") + parentTask := tc.RootTask + require.NotNil(t, parentTask)
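+ // create-dataset below writes "second" as its condition output, so only condition-3 should trigger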
+ + // Run secondary pipeline + _, secondaryPipelineTask := tc.RunDagDriver("secondary-pipeline", parentTask) + parentTask = secondaryPipelineTask + + // Run create_dataset() + createDatasetExecution, _ := tc.RunContainerDriver("create-dataset", parentTask, nil, false) + + // Get the output parameter file path + conditionOutPath := createDatasetExecution.ExecutorInput.Outputs.Parameters["condition_out"].OutputFile + + _ = tc.RunLauncher(createDatasetExecution, map[string][]byte{ + "/tmp/kfp_outputs/output_metadata.json": []byte("{}"), + conditionOutPath: []byte("second"), + }, true) + + // Run ConditionBranch + _, conditionBranch1Task := tc.RunDagDriver("condition-branches-1", parentTask) + + // Expect this condition to not be met + condition2Execution, _ := tc.RunDagDriver("condition-2", conditionBranch1Task) + require.NotNil(t, condition2Execution.Condition) + require.False(t, *condition2Execution.Condition) + + tc.ExitDag() + + // Expect this condition to not be met + condition4Execution, _ := tc.RunDagDriver("condition-4", conditionBranch1Task) + require.NotNil(t, condition4Execution.Condition) + require.False(t, *condition4Execution.Condition) + + tc.ExitDag() + + // Expect this condition to pass since output of + // create-dataset == "second" + condition3Execution, condition3Task := tc.RunDagDriver("condition-3", conditionBranch1Task) + require.NotNil(t, condition3Execution.Condition) + require.True(t, *condition3Execution.Condition) + + parentTask = condition3Task + giveAnimal1Execution, _ := tc.RunContainerDriver("give-animal-2", parentTask, nil, false) + _ = tc.RunLauncher(giveAnimal1Execution, map[string][]byte{ + "/tmp/kfp_outputs/output_metadata.json": []byte("{}"), + }, true) + + analyzeAnimal1Execution, _ := tc.RunContainerDriver("analyze-animal", parentTask, nil, false) + + // Run the launcher for analyze-animal which will create outputs and propagate them up the DAG hierarchy + tc.RunLauncher(analyzeAnimal1Execution, map[string][]byte{"/tmp/kfp_outputs/output_metadata.json": []byte("{}")}, true) + + tc.ExitDag() + tc.ExitDag() + tc.ExitDag() + parentTask = tc.RootTask + + _, _ = tc.RunContainerDriver("check-animal", parentTask, nil, true) +} + +func TestFinalStatus(t *testing.T) { + tc := NewTestContextWithRootExecuted( + t, + &pipelinespec.PipelineJob_RuntimeConfig{}, + "test_data/pipeline_with_input_status_state.yaml", + ) + parentTask := tc.RootTask + require.NotNil(t, parentTask) + + _, exitHandler1Task := tc.RunDagDriver("exit-handler-1", parentTask) + parentTask = exitHandler1Task + + _, _ = tc.RunContainerDriver("some-task", parentTask, nil, true) + + tc.ExitDag() + parentTask = tc.RootTask + + _, echoStateTask := tc.RunContainerDriver("echo-state", parentTask, nil, true) + require.Len(t, echoStateTask.Inputs.GetParameters(), 1) + inputFinalStatusParam := echoStateTask.Inputs.GetParameters()[0] + require.NotNil(t, inputFinalStatusParam) + // Mock library doesn't update DAG statuses; in production we would expect + // this to say "SUCCEEDED" once it's done running + require.Equal(t, "RUNNING", inputFinalStatusParam.GetValue().GetStructValue().Fields["state"].GetStringValue()) + require.Equal(t, "exit-handler-1", inputFinalStatusParam.GetValue().GetStructValue().Fields["pipelineTaskName"].GetStringValue()) +} + +func TestWithCaching(t *testing.T) { + tc := NewTestContextWithRootExecuted( + t, + &pipelinespec.PipelineJob_RuntimeConfig{}, + "test_data/cache_test.yaml", + ) + parentTask := tc.RootTask + require.NotNil(t, parentTask) + + // Run create-dataset driver and
launcher + createDatasetExecution, _ := tc.RunContainerDriver("create-dataset", parentTask, nil, false) + createDatasetLauncher := tc.RunLauncher(createDatasetExecution, map[string][]byte{"/tmp/kfp_outputs/output_metadata.json": []byte("{}")}, true) + require.Len(t, createDatasetLauncher.Task.Outputs.Artifacts, 1) + + // First run of process-dataset - should not be cached + processDatasetExecution, _ := tc.RunContainerDriver("process-dataset", parentTask, nil, false) + processDatasetLauncher := tc.RunLauncher(processDatasetExecution, map[string][]byte{"/tmp/kfp_outputs/output_metadata.json": []byte("{}")}, true) + require.NotNil(t, processDatasetExecution.Cached) + require.False(t, *processDatasetExecution.Cached) + require.Equal(t, apiv2beta1.PipelineTaskDetail_SUCCEEDED, processDatasetLauncher.Task.GetState()) + require.NotEmpty(t, processDatasetExecution.PodSpecPatch) + + // Second run of process-dataset - should be cached + processDatasetExecution2, processDatasetTask2 := tc.RunContainerDriver("process-dataset", parentTask, nil, true) + require.NotNil(t, processDatasetExecution2.Cached) + require.True(t, *processDatasetExecution2.Cached) + require.Equal(t, apiv2beta1.PipelineTaskDetail_CACHED, processDatasetTask2.GetState()) + require.Empty(t, processDatasetExecution2.PodSpecPatch) +} + +func TestOptionalFields(t *testing.T) { + // The API Server will populate runtime config with + // the defaults in the root InputDefinition if they are + // not user overridden. We mock this here. + runtimeInputs := &pipelinespec.PipelineJob_RuntimeConfig{ + ParameterValues: map[string]*structpb.Value{ + "input_str4": structpb.NewNullValue(), + "input_str5": structpb.NewStringValue("Some pipeline default"), + "input_str6": structpb.NewNullValue(), + }, + } + + tc := NewTestContextWithRootExecuted( + t, runtimeInputs, + "test_data/component_with_optional_inputs.yaml", + ) + parentTask := tc.RootTask + require.NotNil(t, parentTask) + + execution, _ := tc.RunContainerDriver("component-op", parentTask, nil, false) + require.NotNil(t, execution) + + // Run launcher which will automatically add default parameters to the task + // via addDefaultParametersToTask() for any parameters that have defaults + // but weren't explicitly provided + launcherExec := tc.RunLauncher(execution, map[string][]byte{ + "/tmp/kfp_outputs/output_metadata.json": []byte("{}"), + }, true) + + params := launcherExec.Task.Inputs.GetParameters() + require.NotEmpty(t, params) + + p := tc.fetchParameter("input_str1", params) + require.NotNil(t, p) + + p = tc.fetchParameter("input_str2", params) + require.NotNil(t, p) + + p = tc.fetchParameter("input_str3", params) + require.Nil(t, p) + + p = tc.fetchParameter("input_str4_from_pipeline", params) + require.NotNil(t, p) + + p = tc.fetchParameter("input_str5_from_pipeline", params) + require.NotNil(t, p) + + p = tc.fetchParameter("input_str6_from_pipeline", params) + require.Nil(t, p) + + p = tc.fetchParameter("input_bool1", params) + require.NotNil(t, p) + + p = tc.fetchParameter("input_bool2", params) + require.Nil(t, p) + + p = tc.fetchParameter("input_dict", params) + require.NotNil(t, p) + + p = tc.fetchParameter("input_list", params) + require.NotNil(t, p) + + p = tc.fetchParameter("input_int", params) + require.NotNil(t, p) +} + +func TestK8SPlatform(t *testing.T) { + nodeAffinity := structpb.NewStructValue(&structpb.Struct{ + Fields: map[string]*structpb.Value{ + "requiredDuringSchedulingIgnoredDuringExecution": structpb.NewStructValue(&structpb.Struct{ + Fields:
map[string]*structpb.Value{ + "nodeSelectorTerms": structpb.NewListValue(&structpb.ListValue{ + Values: []*structpb.Value{ + structpb.NewStructValue(&structpb.Struct{ + Fields: map[string]*structpb.Value{ + "matchExpressions": structpb.NewListValue(&structpb.ListValue{ + Values: []*structpb.Value{ + structpb.NewStructValue(&structpb.Struct{ + Fields: map[string]*structpb.Value{ + "key": structpb.NewStringValue("kubernetes.io/os"), + "operator": structpb.NewStringValue("In"), + "values": structpb.NewListValue(&structpb.ListValue{ + Values: []*structpb.Value{ + structpb.NewStringValue("linux"), + }, + }), + }, + }), + }, + }), + }, + }), + }, + }), + }, + }) + + // The API Server will populate runtime config with + // the defaults in the root InputDefinition if they are + // not user overridden. We mock this here. + runtimeInputs := &pipelinespec.PipelineJob_RuntimeConfig{ + ParameterValues: map[string]*structpb.Value{ + "configmap_parm": structpb.NewStringValue("cfg-2"), + "container_image": structpb.NewStringValue("python:3.7-alpine"), + "cpu_limit": structpb.NewStringValue("200m"), + "default_node_affinity_input": nodeAffinity, + "empty_dir_mnt_path": structpb.NewStringValue("/empty_dir/path"), + "field_path": structpb.NewStringValue("spec.serviceAccountName"), + "memory_limit": structpb.NewStringValue("50Mi"), + "node_selector_input": structpb.NewStructValue(&structpb.Struct{ + Fields: map[string]*structpb.Value{ + "kubernetes.io/os": structpb.NewStringValue("linux"), + }, + }), + "pull_secret_1": structpb.NewStringValue("pull-secret-1"), + "pull_secret_2": structpb.NewStringValue("pull-secret-2"), + "pull_secret_3": structpb.NewStringValue("pull-secret-3"), + "pvc_name_suffix_input": structpb.NewStringValue("-pvc-1"), + "secret_param": structpb.NewStringValue("secret-2"), + "tolerations_dict_input": structpb.NewStructValue(&structpb.Struct{ + Fields: map[string]*structpb.Value{ + "effect": structpb.NewStringValue("NoSchedule"), + "key": structpb.NewStringValue("some_foo_key6"), + "operator": structpb.NewStringValue("Equal"), + "value": structpb.NewStringValue("value3"), + }, + }), + "tolerations_list_input": structpb.NewListValue(&structpb.ListValue{ + Values: []*structpb.Value{ + structpb.NewStructValue(&structpb.Struct{ + Fields: map[string]*structpb.Value{ + "effect": structpb.NewStringValue("NoSchedule"), + "key": structpb.NewStringValue("some_foo_key4"), + "operator": structpb.NewStringValue("Equal"), + "value": structpb.NewStringValue("value2"), + }, + }), + structpb.NewStructValue(&structpb.Struct{ + Fields: map[string]*structpb.Value{ + "effect": structpb.NewStringValue("NoExecute"), + "key": structpb.NewStringValue("some_foo_key5"), + "operator": structpb.NewStringValue("Exists"), + }, + }), + }, + }), + }, + } + + tc := NewTestContextWithRootExecuted( + t, runtimeInputs, + "test_data/k8s_parameters.yaml", + ) + parentTask := tc.RootTask + require.NotNil(t, parentTask) + + // Execute all the preliminary tasks that will feed Task Output Parameters to the + // Assert tasks (and secondary pipeline) + + // Run cfg-name-generator + cfgNameGenExecution, _ := tc.RunContainerDriver("cfg-name-generator", parentTask, nil, false) + cfgNameGenOutputPath := cfgNameGenExecution.ExecutorInput.Outputs.Parameters["some_output"].OutputFile + _ = tc.RunLauncher(cfgNameGenExecution, map[string][]byte{ + "/tmp/kfp_outputs/output_metadata.json": []byte("{}"), + cfgNameGenOutputPath: []byte("cfg-3"), + }, true) + + // Run get-access-mode + getAccessModeExecution, _ :=
tc.RunContainerDriver("get-access-mode", parentTask, nil, false) + accessModeOutputPath := getAccessModeExecution.ExecutorInput.Outputs.Parameters["access_mode"].OutputFile + _ = tc.RunLauncher(getAccessModeExecution, map[string][]byte{ + "/tmp/kfp_outputs/output_metadata.json": []byte("{}"), + accessModeOutputPath: []byte("[\"ReadWriteOnce\"]"), + }, true) + + // Run get-node-affinity + getNodeAffinityExecution, _ := tc.RunContainerDriver("get-node-affinity", parentTask, nil, false) + nodeAffinityOutputPath := getNodeAffinityExecution.ExecutorInput.Outputs.Parameters["node_affinity"].OutputFile + // Serialize nodeAffinity to JSON + nodeAffinityJSON, err := json.Marshal(nodeAffinity.GetStructValue()) + require.NoError(t, err) + _ = tc.RunLauncher(getNodeAffinityExecution, map[string][]byte{ + "/tmp/kfp_outputs/output_metadata.json": []byte("{}"), + nodeAffinityOutputPath: nodeAffinityJSON, + }, true) + + // Run secret-name-generator + secretNameGenExecution, _ := tc.RunContainerDriver("secret-name-generator", parentTask, nil, false) + secretNameGenOutputPath := secretNameGenExecution.ExecutorInput.Outputs.Parameters["some_output"].OutputFile + _ = tc.RunLauncher(secretNameGenExecution, map[string][]byte{ + "/tmp/kfp_outputs/output_metadata.json": []byte("{}"), + secretNameGenOutputPath: []byte("secret-3"), + }, true) + + // Run generate-requests-resources + generateRequestExecution, _ := tc.RunContainerDriver("generate-requests-resources", parentTask, nil, false) + cpuRequestOutputPath := generateRequestExecution.ExecutorInput.Outputs.Parameters["cpu_request_out"].OutputFile + memoryRequestOutputPath := generateRequestExecution.ExecutorInput.Outputs.Parameters["memory_request_out"].OutputFile + _ = tc.RunLauncher(generateRequestExecution, map[string][]byte{ + "/tmp/kfp_outputs/output_metadata.json": []byte("{}"), + cpuRequestOutputPath: []byte("100m"), + memoryRequestOutputPath: []byte("50Mi"), + }, true) + + // Run create-pvc task since it depended on get-access-mode + // There is no launcher for this task, we expect the output + // parameter to be created by the driver call + + // Create a mock Kubernetes client for PVC operations + _, createPvcTask := tc.RunContainerDriver("createpvc", parentTask, nil, true) + require.NotNil(t, createPvcTask.Outputs) + // CreatePvc always has one output, which is the pvc name + require.Len(t, createPvcTask.Outputs.GetParameters(), 1) + require.Equal(t, createPvcTask.Outputs.GetParameters()[0].ParameterKey, "name") + + // Note we don't need to mock the parameter output for k8s tasks like createpvc since + // there is no launcher for them. 
+ + executorInput, assertValuesTask := tc.RunContainerDriver("assert-values", parentTask, nil, true) + require.NotNil(t, assertValuesTask.Outputs) + require.NotNil(t, executorInput) + + podSpecString := executorInput.PodSpecPatch + require.NotEmpty(t, podSpecString) + + podSpec := &v1.PodSpec{} + err = json.Unmarshal([]byte(podSpecString), podSpec) + require.NoError(t, err) + + // Check that pod spec values were correctly set + require.Equal(t, "python:3.7-alpine", podSpec.Containers[0].Image) + require.Contains(t, podSpec.NodeSelector, "kubernetes.io/arch") + require.Equal(t, "amd64", podSpec.NodeSelector["kubernetes.io/arch"]) + require.Len(t, podSpec.Containers, 1) + + // The volumes are: pvc, secret, and cfg-map volumes + require.Len(t, podSpec.Volumes, 3) + require.Len(t, podSpec.Containers[0].VolumeMounts, 3) + + // Verify all volumes are present and configured correctly + // Volume 0: PVC volume + volume := podSpec.Volumes[0] + require.Contains(t, volume.Name, "-pvc-1") + require.Contains(t, volume.PersistentVolumeClaim.ClaimName, "-pvc-1") + + // Volume 1: Secret volume + volume = podSpec.Volumes[1] + require.Equal(t, "secret-2", volume.Name) + require.Equal(t, "secret-2", volume.Secret.SecretName) + require.False(t, *volume.Secret.Optional) + + // Volume 2: ConfigMap volume + volume = podSpec.Volumes[2] + require.Equal(t, "cfg-2", volume.Name) + require.Equal(t, "cfg-2", volume.ConfigMap.Name) + require.False(t, *volume.ConfigMap.Optional) + + // Node affinity + require.NotNil(t, podSpec.Affinity) + require.NotNil(t, podSpec.Affinity.NodeAffinity) + require.NotNil(t, podSpec.Affinity.NodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution) + require.Len(t, podSpec.Affinity.NodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution.NodeSelectorTerms, 1) + require.Len(t, podSpec.Affinity.NodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution.NodeSelectorTerms[0].MatchExpressions, 1) + require.Equal(t, "kubernetes.io/os", podSpec.Affinity.NodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution.NodeSelectorTerms[0].MatchExpressions[0].Key) + require.Equal(t, "In", string(podSpec.Affinity.NodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution.NodeSelectorTerms[0].MatchExpressions[0].Operator)) + require.Equal(t, []string{"linux"}, podSpec.Affinity.NodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution.NodeSelectorTerms[0].MatchExpressions[0].Values) + + // Image pull secrets + require.Len(t, podSpec.ImagePullSecrets, 6) + expectedPullSecrets := []string{"pull-secret-1", "pull-secret-2", "pull-secret-1", "pull-secret-2", "pull-secret-3", "pull-secret-4"} + for i, secret := range podSpec.ImagePullSecrets { + require.Equal(t, expectedPullSecrets[i], secret.Name) + } + + // Environment variables + require.Len(t, podSpec.Containers[0].Env, 11) + expectedEnvVars := []v1.EnvVar{ + { + Name: "KFP_POD_NAME", + ValueFrom: &v1.EnvVarSource{ + FieldRef: &v1.ObjectFieldSelector{ + FieldPath: "metadata.name", + }, + }, + }, + { + Name: "KFP_POD_UID", + ValueFrom: &v1.EnvVarSource{ + FieldRef: &v1.ObjectFieldSelector{ + FieldPath: "metadata.uid", + }, + }, + }, + { + Name: "NAMESPACE", + ValueFrom: &v1.EnvVarSource{ + FieldRef: &v1.ObjectFieldSelector{ + FieldPath: "metadata.namespace", + }, + }, + }, + { + Name: "SECRET_KEY_1", + ValueFrom: &v1.EnvVarSource{ + SecretKeyRef: &v1.SecretKeySelector{ + LocalObjectReference: v1.LocalObjectReference{Name: "secret-1"}, + Key: "secretKey1", + Optional: &[]bool{false}[0], + }, + }, + }, + { + Name: "SECRET_KEY_2", + ValueFrom: 
&v1.EnvVarSource{ + SecretKeyRef: &v1.SecretKeySelector{ + LocalObjectReference: v1.LocalObjectReference{Name: "secret-1"}, + Key: "secretKey2", + Optional: &[]bool{false}[0], + }, + }, + }, + { + Name: "SECRET_KEY_3", + ValueFrom: &v1.EnvVarSource{ + SecretKeyRef: &v1.SecretKeySelector{ + LocalObjectReference: v1.LocalObjectReference{Name: "secret-2"}, + Key: "secretKey3", + Optional: &[]bool{false}[0], + }, + }, + }, + { + Name: "SECRET_KEY_4", + ValueFrom: &v1.EnvVarSource{ + SecretKeyRef: &v1.SecretKeySelector{ + LocalObjectReference: v1.LocalObjectReference{Name: "secret-3"}, + Key: "secretKey4", + Optional: &[]bool{false}[0], + }, + }, + }, + { + Name: "CFG_KEY_1", + ValueFrom: &v1.EnvVarSource{ + ConfigMapKeyRef: &v1.ConfigMapKeySelector{ + LocalObjectReference: v1.LocalObjectReference{Name: "cfg-1"}, + Key: "cfgKey1", + Optional: &[]bool{false}[0], + }, + }, + }, + { + Name: "CFG_KEY_2", + ValueFrom: &v1.EnvVarSource{ + ConfigMapKeyRef: &v1.ConfigMapKeySelector{ + LocalObjectReference: v1.LocalObjectReference{Name: "cfg-1"}, + Key: "cfgKey2", + Optional: &[]bool{false}[0], + }, + }, + }, + { + Name: "CFG_KEY_3", + ValueFrom: &v1.EnvVarSource{ + ConfigMapKeyRef: &v1.ConfigMapKeySelector{ + LocalObjectReference: v1.LocalObjectReference{Name: "cfg-2"}, + Key: "cfgKey3", + Optional: &[]bool{false}[0], + }, + }, + }, + { + Name: "CFG_KEY_4", + ValueFrom: &v1.EnvVarSource{ + ConfigMapKeyRef: &v1.ConfigMapKeySelector{ + LocalObjectReference: v1.LocalObjectReference{Name: "cfg-3"}, + Key: "cfgKey4", + Optional: &[]bool{false}[0], + }, + }, + }, + } + for i, env := range podSpec.Containers[0].Env { + require.Equal(t, expectedEnvVars[i].Name, env.Name) + require.Equal(t, expectedEnvVars[i].ValueFrom, env.ValueFrom) + } + + // Resource limits and requests + require.Equal(t, "200m", podSpec.Containers[0].Resources.Limits.Cpu().String()) + require.Equal(t, "50Mi", podSpec.Containers[0].Resources.Limits.Memory().String()) + require.Equal(t, "100m", podSpec.Containers[0].Resources.Requests.Cpu().String()) + require.Equal(t, "50Mi", podSpec.Containers[0].Resources.Requests.Memory().String()) + + // Tolerations + require.Len(t, podSpec.Tolerations, 6) + expectedTolerations := []v1.Toleration{ + { + Key: "some_foo_key1", + Operator: "Equal", + Value: "value1", + Effect: "NoSchedule", + }, + { + Key: "some_foo_key2", + Operator: "Exists", + Effect: "NoExecute", + }, + { + Key: "some_foo_key3", + Operator: "Equal", + Value: "value1", + Effect: "NoSchedule", + }, + { + Key: "some_foo_key6", + Operator: "Equal", + Value: "value3", + Effect: "NoSchedule", + }, + { + Key: "some_foo_key4", + Operator: "Equal", + Value: "value2", + Effect: "NoSchedule", + }, + { + Key: "some_foo_key5", + Operator: "Exists", + Effect: "NoExecute", + }, + } + for i, toleration := range podSpec.Tolerations { + require.Equal(t, expectedTolerations[i].Key, toleration.Key) + require.Equal(t, expectedTolerations[i].Operator, toleration.Operator) + require.Equal(t, expectedTolerations[i].Value, toleration.Value) + require.Equal(t, expectedTolerations[i].Effect, toleration.Effect) + } + +} + +// This test creates a DAG with a single task that uses a component with inputs +// and runtime constants. The test verifies that the inputs are correctly passed +// to the Runtime Task. 
+func TestContainerComponentInputsAndRuntimeConstants(t *testing.T) { + // Create a root DAG execution using basic inputs + runtimeInputs := &pipelinespec.PipelineJob_RuntimeConfig{ + ParameterValues: map[string]*structpb.Value{ + "name_in": structpb.NewStringValue("some_name"), + "number_in": structpb.NewNumberValue(1.0), + "threshold_in": structpb.NewNumberValue(0.1), + "active_in": structpb.NewBoolValue(false), + }, + } + + tc := NewTestContextWithRootExecuted(t, runtimeInputs, "test_data/componentInput.yaml") + + // Run driver for process-inputs + processInputsExecution, processInputsTask := tc.RunContainerDriver("process-inputs", tc.RootTask, nil, false) + require.NotNil(t, processInputsExecution.ExecutorInput.Outputs) + + // Verify input parameters from driver + params := processInputsTask.Inputs.GetParameters() + require.Equal(t, apiv2beta1.IOType_COMPONENT_INPUT, tc.fetchParameter("name", params).GetType()) + require.Equal(t, apiv2beta1.IOType_COMPONENT_INPUT, tc.fetchParameter("number", params).GetType()) + require.Equal(t, apiv2beta1.IOType_COMPONENT_INPUT, tc.fetchParameter("active", params).GetType()) + require.Equal(t, apiv2beta1.IOType_COMPONENT_INPUT, tc.fetchParameter("threshold", params).GetType()) + require.Equal(t, apiv2beta1.IOType_RUNTIME_VALUE_INPUT, tc.fetchParameter("a_runtime_string", params).GetType()) + require.Equal(t, apiv2beta1.IOType_RUNTIME_VALUE_INPUT, tc.fetchParameter("a_runtime_number", params).GetType()) + require.Equal(t, apiv2beta1.IOType_RUNTIME_VALUE_INPUT, tc.fetchParameter("a_runtime_bool", params).GetType()) + + require.Equal(t, processInputsExecution.TaskID, processInputsTask.TaskId) + require.Equal(t, processInputsExecution.ExecutorInput.Inputs.ParameterValues["name"].GetStringValue(), "some_name") + require.Equal(t, processInputsExecution.ExecutorInput.Inputs.ParameterValues["number"].GetNumberValue(), 1.0) + require.Equal(t, processInputsExecution.ExecutorInput.Inputs.ParameterValues["threshold"].GetNumberValue(), 0.1) + require.Equal(t, processInputsExecution.ExecutorInput.Inputs.ParameterValues["active"].GetBoolValue(), false) + require.Equal(t, processInputsExecution.ExecutorInput.Inputs.ParameterValues["a_runtime_string"].GetStringValue(), "foo") + require.Equal(t, processInputsExecution.ExecutorInput.Inputs.ParameterValues["a_runtime_number"].GetNumberValue(), 10.0) + require.Equal(t, processInputsExecution.ExecutorInput.Inputs.ParameterValues["a_runtime_bool"].GetBoolValue(), true) + + // Mock a Launcher run by updating the task with output data + // This test is checking artifact metadata which the mock sets explicitly + launcherExec := tc.RunLauncher(processInputsExecution, map[string][]byte{"/tmp/kfp_outputs/output_metadata.json": []byte("{}")}, true) + require.Len(t, launcherExec.Task.Outputs.Artifacts, 1) + + // Run driver for analyze-inputs + analyzeInputsExecution, _ := tc.RunContainerDriver("analyze-inputs", tc.RootTask, nil, false) + require.NotNil(t, analyzeInputsExecution.ExecutorInput.Outputs) + require.Equal(t, 1, len(analyzeInputsExecution.ExecutorInput.Inputs.Artifacts["input_text"].Artifacts)) + launcherExec = tc.RunLauncher(analyzeInputsExecution, map[string][]byte{"/tmp/kfp_outputs/output_metadata.json": []byte("{}")}, true) + require.Len(t, launcherExec.Task.Outputs.Artifacts, 0) + + // Verify Executor Input has the correct artifact + artifact := analyzeInputsExecution.ExecutorInput.Inputs.Artifacts["input_text"].Artifacts[0] + require.Equal(t, apiv2beta1.Artifact_Dataset.String(), artifact.Type.GetSchemaTitle()) + 
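// The artifact keeps the output name declared by the producer component +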
require.Equal(t, "output_text", artifact.Name) +} + +func TestNestedPipelineOptionalInputChildLevel(t *testing.T) { + // This test validates that when a DAG task (nested pipeline) has optional inputs with defaults, + // and the parent only provides some of those inputs, the child tasks still receive the defaults + // for the inputs that weren't provided. + tc := NewTestContextWithRootExecuted( + t, + &pipelinespec.PipelineJob_RuntimeConfig{}, + "test_data/nested_pipeline_opt_input_child_level_compiled.yaml", + ) + parentTask := tc.RootTask + + // Run the nested pipeline driver - it should receive 3 inputs from root and use defaults for the other 3 + nestedPipelineExecution, nestedPipelineTask := tc.RunDagDriver("nested-pipeline", parentTask) + require.NotNil(t, nestedPipelineExecution) + require.NotNil(t, nestedPipelineTask) + require.Equal(t, apiv2beta1.PipelineTaskDetail_RUNNING, nestedPipelineTask.State) + + // The nested pipeline task should have ALL 6 inputs (3 from parent + 3 defaults) + require.NotNil(t, nestedPipelineTask.Inputs) + require.NotNil(t, nestedPipelineTask.Inputs.Parameters) + + inputParams := make(map[string]*structpb.Value) + for _, param := range nestedPipelineTask.Inputs.Parameters { + inputParams[param.ParameterKey] = param.Value + } + + // Verify all 6 parameters are present + require.Contains(t, inputParams, "nestedInputBool1", "nestedInputBool1 should be present") + require.Contains(t, inputParams, "nestedInputBool2", "nestedInputBool2 should be present (from default)") + require.Contains(t, inputParams, "nestedInputInt1", "nestedInputInt1 should be present") + require.Contains(t, inputParams, "nestedInputInt2", "nestedInputInt2 should be present (from default)") + require.Contains(t, inputParams, "nestedInputStr1", "nestedInputStr1 should be present") + require.Contains(t, inputParams, "nestedInputStr2", "nestedInputStr2 should be present (from default)") + + // Verify the values are correct + // From parent (root pipeline) + require.Equal(t, true, inputParams["nestedInputBool1"].GetBoolValue(), + "nestedInputBool1 should be true (from parent)") + require.Equal(t, 1.0, inputParams["nestedInputInt1"].GetNumberValue(), + "nestedInputInt1 should be 1.0 (from parent)") + require.Equal(t, "Input - pipeline", inputParams["nestedInputStr1"].GetStringValue(), + "nestedInputStr1 should be 'Input - pipeline' (from parent)") + + // From defaults (not provided by parent) + require.Equal(t, false, inputParams["nestedInputBool2"].GetBoolValue(), + "nestedInputBool2 should be false (from default)") + require.Equal(t, 0.0, inputParams["nestedInputInt2"].GetNumberValue(), + "nestedInputInt2 should be 0.0 (from default)") + require.Equal(t, "Input 2 - nested pipeline", inputParams["nestedInputStr2"].GetStringValue(), + "nestedInputStr2 should be 'Input 2 - nested pipeline' (from default)") +} diff --git a/backend/src/v2/driver/driver.go b/backend/src/v2/driver/driver.go index 5ead0076e1b..da1d069ff78 100644 --- a/backend/src/v2/driver/driver.go +++ b/backend/src/v2/driver/driver.go @@ -21,70 +21,17 @@ import ( "strings" "github.com/kubeflow/pipelines/backend/src/apiserver/config/proxy" + "github.com/kubeflow/pipelines/backend/src/common/util" + "github.com/kubeflow/pipelines/backend/src/v2/driver/resolver" "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" "github.com/kubeflow/pipelines/backend/src/v2/component" - "github.com/kubeflow/pipelines/backend/src/v2/metadata" - "github.com/kubeflow/pipelines/kubernetes_platform/go/kubernetesplatform" 
"google.golang.org/protobuf/encoding/protojson" "google.golang.org/protobuf/types/known/structpb" k8score "k8s.io/api/core/v1" k8sres "k8s.io/apimachinery/pkg/api/resource" ) -// Driver options -type Options struct { - // required, pipeline context name - PipelineName string - // required, KFP run ID - RunID string - // required, Component spec - Component *pipelinespec.ComponentSpec - // optional, iteration index. -1 means not an iteration. - IterationIndex int - - // optional, required only by root DAG driver - RuntimeConfig *pipelinespec.PipelineJob_RuntimeConfig - Namespace string - - // optional, required by non-root drivers - Task *pipelinespec.PipelineTaskSpec - DAGExecutionID int64 - - // optional, required only by container driver - Container *pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec - - // optional, allows to specify kubernetes-specific executor config - KubernetesExecutorConfig *kubernetesplatform.KubernetesExecutorConfig - - // optional, required only if the {{$.pipeline_job_resource_name}} placeholder is used or the run uses a workspace - RunName string - // optional, required only if the {{$.pipeline_job_name}} placeholder is used - RunDisplayName string - - PipelineLogLevel string - - PublishLogs string - - CacheDisabled bool - - DriverType string - - TaskName string // the original name of the task, used for input resolution - - // set to true if ml pipeline server is serving over tls - MLPipelineTLSEnabled bool - - // set to true if metadata server is serving over tls - MLMDTLSEnabled bool - - MLMDServerAddress string - - MLMDServerPort string - - CaCertPath string -} - // TaskConfig needs to stay aligned with the TaskConfig in the SDK. type TaskConfig struct { Affinity *k8score.Affinity `json:"affinity"` @@ -96,38 +43,8 @@ type TaskConfig struct { Resources k8score.ResourceRequirements `json:"resources"` } -// Identifying information used for error messages -func (o Options) info() string { - msg := fmt.Sprintf("pipelineName=%v, runID=%v", o.PipelineName, o.RunID) - if o.Task.GetTaskInfo().GetName() != "" { - msg = msg + fmt.Sprintf(", taskDisplayName=%q", o.Task.GetTaskInfo().GetName()) - } - if o.TaskName != "" { - msg = msg + fmt.Sprintf(", taskName=%q", o.TaskName) - } - if o.Task.GetComponentRef().GetName() != "" { - msg = msg + fmt.Sprintf(", component=%q", o.Task.GetComponentRef().GetName()) - } - if o.DAGExecutionID != 0 { - msg = msg + fmt.Sprintf(", dagExecutionID=%v", o.DAGExecutionID) - } - if o.IterationIndex >= 0 { - msg = msg + fmt.Sprintf(", iterationIndex=%v", o.IterationIndex) - } - if o.RuntimeConfig != nil { - msg = msg + ", runtimeConfig" // this only means runtimeConfig is not empty - } - if o.Component.GetImplementation() != nil { - msg = msg + ", componentSpec" // this only means componentSpec is not empty - } - if o.KubernetesExecutorConfig != nil { - msg = msg + ", KubernetesExecutorConfig" // this only means KubernetesExecutorConfig is not empty - } - return msg -} - type Execution struct { - ID int64 + TaskID string ExecutorInput *pipelinespec.ExecutorInput IterationCount *int // number of iterations, -1 means not an iterator Condition *bool // true -> trigger the task, false -> not trigger the task, nil -> the task is unconditional @@ -152,16 +69,17 @@ func getPodResource( ) (*k8sres.Quantity, error) { var resolved string - if new != "" { + switch { + case new != "": var err error - resolved, err = resolvePodSpecInputRuntimeParameter(new, executorInput) + resolved, err = resolver.ResolveParameterOrPipelineChannel(new, 
executorInput) if err != nil { return nil, fmt.Errorf("failed to resolve executor input when retrieving pod resource: %w", err) } - } else if old != 0 { + case old != 0: resolved = fmt.Sprintf(oldFmtStr, old) - } else { + default: return nil, nil } @@ -216,15 +134,16 @@ func getTaskConfigOptions( return passthroughEnabled, setOnPod } -// initPodSpecPatch generates a strategic merge patch for pod spec, it is merged -// to container base template generated in compiler/container.go. Therefore, only +// initPodSpecPatch generates a strategic merge patch for pod spec; it is merged +// to the container base template generated in compiler/container.go. Therefore, only // dynamic values are patched here. The volume mounts / configmap mounts are -// defined in compiler, because they are static. +// defined in the compiler because they are static. func initPodSpecPatch( container *pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec, componentSpec *pipelinespec.ComponentSpec, executorInput *pipelinespec.ExecutorInput, - executionID int64, + taskID string, + parentTaskID string, pipelineName string, runID string, runName string, @@ -232,18 +151,16 @@ func initPodSpecPatch( publishLogs string, cacheDisabled string, taskConfig *TaskConfig, + fingerPrint string, + iterationIndex *int, + taskName string, mlPipelineTLSEnabled bool, - metadataTLSEnabled bool, caCertPath string, ) (*k8score.PodSpec, error) { executorInputJSON, err := protojson.Marshal(executorInput) if err != nil { return nil, fmt.Errorf("failed to init podSpecPatch: %w", err) } - componentJSON, err := protojson.Marshal(componentSpec) - if err != nil { - return nil, fmt.Errorf("failed to init podSpecPatch: %w", err) - } // Convert environment variables userEnvVar := make([]k8score.EnvVar, 0) @@ -265,28 +182,22 @@ func initPodSpecPatch( userCmdArgs = append(userCmdArgs, container.Args...) launcherCmd := []string{ component.KFPLauncherPath, - // TODO(Bobgy): no need to pass pipeline_name and run_id, these info can be fetched via pipeline context and pipeline run context which have been created by root DAG driver. 
"--pipeline_name", pipelineName, "--run_id", runID, - "--execution_id", fmt.Sprintf("%v", executionID), + "--task_id", fmt.Sprintf("%v", taskID), + "--parent_task_id", fmt.Sprintf("%v", parentTaskID), "--executor_input", string(executorInputJSON), - "--component_spec", string(componentJSON), "--pod_name", fmt.Sprintf("$(%s)", component.EnvPodName), "--pod_uid", fmt.Sprintf("$(%s)", component.EnvPodUID), - "--mlmd_server_address", - fmt.Sprintf("$(%s)", component.EnvMetadataHost), - "--mlmd_server_port", - fmt.Sprintf("$(%s)", component.EnvMetadataPort), "--publish_logs", publishLogs, + "--fingerprint", fingerPrint, + "--task_name", taskName, } if mlPipelineTLSEnabled { launcherCmd = append(launcherCmd, "--ml_pipeline_tls_enabled") } - if metadataTLSEnabled { - launcherCmd = append(launcherCmd, "--metadata_tls_enabled") - } if caCertPath != "" { launcherCmd = append(launcherCmd, "--ca_cert_path", caCertPath) } @@ -300,7 +211,10 @@ func initPodSpecPatch( if publishLogs == "true" { launcherCmd = append(launcherCmd, "--publish_logs", publishLogs) } - launcherCmd = append(launcherCmd, "--") // separater before user command and args + if iterationIndex != nil { + launcherCmd = append(launcherCmd, "--iteration_index", fmt.Sprintf("%v", *iterationIndex)) + } + launcherCmd = append(launcherCmd, "--") // separate before user command and args res := k8score.ResourceRequirements{ Limits: map[k8score.ResourceName]k8sres.Quantity{}, Requests: map[k8score.ResourceName]k8sres.Quantity{}, @@ -362,7 +276,7 @@ func initPodSpecPatch( if accelerator != nil { var acceleratorType string if accelerator.GetResourceType() != "" { - acceleratorType, err = resolvePodSpecInputRuntimeParameter(accelerator.GetResourceType(), executorInput) + acceleratorType, err = resolver.ResolveParameterOrPipelineChannel(accelerator.GetResourceType(), executorInput) if err != nil { return nil, fmt.Errorf("failed to init podSpecPatch: %w", err) } @@ -375,7 +289,7 @@ func initPodSpecPatch( if accelerator.GetResourceCount() != "" { var err error - acceleratorCount, err = resolvePodSpecInputRuntimeParameter(accelerator.GetResourceCount(), executorInput) + acceleratorCount, err = resolver.ResolveParameterOrPipelineChannel(accelerator.GetResourceCount(), executorInput) if err != nil { return nil, fmt.Errorf("failed to init podSpecPatch: %w", err) } @@ -392,7 +306,7 @@ func initPodSpecPatch( } } - containerImage, err := resolvePodSpecInputRuntimeParameter(container.Image, executorInput) + containerImage, err := resolver.ResolveParameterOrPipelineChannel(container.Image, executorInput) if err != nil { return nil, fmt.Errorf("failed to init podSpecPatch: %w", err) } @@ -406,6 +320,35 @@ func initPodSpecPatch( }}, } + // Always add KFP_POD_NAME and KFP_POD_UID environment variables using downward API + // These are required for the launcher to function properly + kfpEnvVars := []k8score.EnvVar{ + { + Name: component.EnvPodName, + ValueFrom: &k8score.EnvVarSource{ + FieldRef: &k8score.ObjectFieldSelector{ + FieldPath: "metadata.name", + }, + }, + }, + { + Name: component.EnvPodUID, + ValueFrom: &k8score.EnvVarSource{ + FieldRef: &k8score.ObjectFieldSelector{ + FieldPath: "metadata.uid", + }, + }, + }, + { + Name: "NAMESPACE", + ValueFrom: &k8score.EnvVarSource{ + FieldRef: &k8score.ObjectFieldSelector{ + FieldPath: "metadata.namespace", + }, + }, + }, + } + if setOnTaskConfig[pipelinespec.TaskConfigPassthroughType_ENV] { taskConfig.Env = userEnvVar } @@ -414,6 +357,9 @@ func initPodSpecPatch( podSpec.Containers[0].Env = userEnvVar } + // Always 
append KFP environment variables to the pod spec + podSpec.Containers[0].Env = append(podSpec.Containers[0].Env, kfpEnvVars...) + if setOnTaskConfig[pipelinespec.TaskConfigPassthroughType_RESOURCES] { taskConfig.Resources = res } @@ -632,29 +578,16 @@ func addModelcarsToPodSpec( } } -func validateNonRoot(opts Options) error { - if opts.PipelineName == "" { - return fmt.Errorf("pipeline name is required") - } - if opts.RunID == "" { - return fmt.Errorf("KFP run ID is required") - } - if opts.Component == nil { - return fmt.Errorf("component spec is required") - } - if opts.Task.GetTaskInfo().GetName() == "" { - return fmt.Errorf("task spec is required") - } - if opts.RuntimeConfig != nil { - return fmt.Errorf("runtime config is unnecessary") - } - if opts.DAGExecutionID == 0 { - return fmt.Errorf("DAG execution ID is required") - } - return nil -} - -// provisionOutputs prepares output references that will get saved to MLMD. +// provisionOutputs prepares the executorInputs.Outputs field for the executor. +// This is done by computing the executor output file path and setting the +// executorInputs.Outputs fields to point to it. +// +// The executor output file is a JSON file that contains the executor output +// parameters and artifacts. +// +// The executor output file is written to the executor output directory, which +// is a directory under the task root. The executor output directory is +// determined by the executor output file path. func provisionOutputs( pipelineRoot, taskName string, @@ -689,7 +622,7 @@ func provisionOutputs( // artifacts (dsl.get_uri) by allowing the SDK to infer the task root from // the executor output file's directory (set below) and convert it back to // a remote URI at runtime. - taskRootRemote := metadata.GenerateOutputURI(pipelineRoot, []string{taskName, outputURISalt}, false) + taskRootRemote := util.GenerateOutputURI(pipelineRoot, []string{taskName, outputURISalt}, false) // Set per-artifact output URIs under the task root. for name, artifact := range artifacts { @@ -700,7 +633,7 @@ func provisionOutputs( Name: name, // Do not preserve the query string for output artifacts, as otherwise // they'd appear in file and artifact names. 
- Uri: metadata.GenerateOutputURI(taskRootRemote, []string{name}, false), + Uri: util.GenerateOutputURI(taskRootRemote, []string{name}, false), Type: artifact.GetArtifactType(), Metadata: artifact.GetMetadata(), }, diff --git a/backend/src/v2/driver/driver_test.go b/backend/src/v2/driver/driver_test.go index cdb0fb7f30c..4e1831c842f 100644 --- a/backend/src/v2/driver/driver_test.go +++ b/backend/src/v2/driver/driver_test.go @@ -24,6 +24,7 @@ import ( "github.com/kubeflow/pipelines/backend/src/apiserver/config/proxy" "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" + "github.com/kubeflow/pipelines/backend/src/v2/driver/common" "github.com/kubeflow/pipelines/kubernetes_platform/go/kubernetesplatform" "github.com/spf13/viper" "github.com/stretchr/testify/assert" @@ -42,7 +43,7 @@ func Test_initPodSpecPatch_acceleratorConfig(t *testing.T) { container *pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec componentSpec *pipelinespec.ComponentSpec executorInput *pipelinespec.ExecutorInput - executionID int64 + executionID string pipelineName string runID string pipelineLogLevel string @@ -86,7 +87,7 @@ func Test_initPodSpecPatch_acceleratorConfig(t *testing.T) { }, }, nil, - 1, + "1", "MyPipeline", "a1b2c3d4-a1b2-a1b2-a1b2-a1b2c3d4e5f6", "1", @@ -127,7 +128,7 @@ func Test_initPodSpecPatch_acceleratorConfig(t *testing.T) { }, }, nil, - 1, + "1", "MyPipeline", "a1b2c3d4-a1b2-a1b2-a1b2-a1b2c3d4e5f6", "1", @@ -168,7 +169,7 @@ func Test_initPodSpecPatch_acceleratorConfig(t *testing.T) { }, }, nil, - 1, + "1", "MyPipeline", "a1b2c3d4-a1b2-a1b2-a1b2-a1b2c3d4e5f6", "1", @@ -209,7 +210,7 @@ func Test_initPodSpecPatch_acceleratorConfig(t *testing.T) { }, }, nil, - 1, + "1", "MyPipeline", "a1b2c3d4-a1b2-a1b2-a1b2-a1b2c3d4e5f6", "1", @@ -250,7 +251,7 @@ func Test_initPodSpecPatch_acceleratorConfig(t *testing.T) { }, }, nil, - 1, + "1", "MyPipeline", "a1b2c3d4-a1b2-a1b2-a1b2-a1b2c3d4e5f6", "1", @@ -270,6 +271,7 @@ func Test_initPodSpecPatch_acceleratorConfig(t *testing.T) { tt.args.componentSpec, tt.args.executorInput, tt.args.executionID, + "", tt.args.pipelineName, tt.args.runID, "my-run-name", @@ -277,10 +279,11 @@ func Test_initPodSpecPatch_acceleratorConfig(t *testing.T) { tt.args.publishLogs, "false", taskConfig, - false, - false, "", - ) + nil, + "", + false, + "") if tt.wantErr { assert.Nil(t, podSpec) assert.NotNil(t, err) @@ -388,7 +391,8 @@ func Test_initPodSpecPatch_resource_placeholders(t *testing.T) { containerSpec, componentSpec, executorInput, - 27, + "27", + "", "test", "0254beba-0be4-4065-8d97-7dc5e3adf300", "my-run-name", @@ -396,10 +400,11 @@ func Test_initPodSpecPatch_resource_placeholders(t *testing.T) { "false", "false", taskConfig, - false, - false, "", - ) + nil, + "", + false, + "") assert.Nil(t, err) assert.Len(t, podSpec.Containers, 1) @@ -434,22 +439,7 @@ func Test_initPodSpecPatch_legacy_resources(t *testing.T) { executorInput := &pipelinespec.ExecutorInput{} taskConfig := &TaskConfig{} - podSpec, err := initPodSpecPatch( - containerSpec, - componentSpec, - executorInput, - 27, - "test", - "0254beba-0be4-4065-8d97-7dc5e3adf300", - "my-run-name", - "1", - "false", - "false", - taskConfig, - false, - false, - "", - ) + podSpec, err := initPodSpecPatch(containerSpec, componentSpec, executorInput, "27", "", "test", "0254beba-0be4-4065-8d97-7dc5e3adf300", "my-run-name", "1", "false", "false", taskConfig, "", nil, "", false, "") assert.Nil(t, err) assert.Len(t, podSpec.Containers, 1) @@ -486,22 +476,7 @@ func Test_initPodSpecPatch_modelcar_input_artifact(t 
*testing.T) { } taskConfig := &TaskConfig{} - podSpec, err := initPodSpecPatch( - containerSpec, - componentSpec, - executorInput, - 27, - "test", - "0254beba-0be4-4065-8d97-7dc5e3adf300", - "my-run-name", - "1", - "false", - "false", - taskConfig, - false, - false, - "", - ) + podSpec, err := initPodSpecPatch(containerSpec, componentSpec, executorInput, "27", "", "test", "0254beba-0be4-4065-8d97-7dc5e3adf300", "my-run-name", "1", "false", "false", taskConfig, "", nil, "", false, "") assert.Nil(t, err) assert.Len(t, podSpec.InitContainers, 1) @@ -532,23 +507,7 @@ func Test_initPodSpecPatch_modelcar_input_artifact(t *testing.T) { // Validate that setting publishLogs to true propagates to the driver container // commands in the podSpec. func Test_initPodSpecPatch_publishLogs(t *testing.T) { - podSpec, err := initPodSpecPatch( - &pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec{}, - &pipelinespec.ComponentSpec{}, - &pipelinespec.ExecutorInput{}, - // executorInput, - 27, - "test", - "0254beba-0be4-4065-8d97-7dc5e3adf300", - "my-run-name", - "1", - "true", - "false", - nil, - false, - false, - "", - ) + podSpec, err := initPodSpecPatch(&pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec{}, &pipelinespec.ComponentSpec{}, &pipelinespec.ExecutorInput{}, "27", "", "test", "0254beba-0be4-4065-8d97-7dc5e3adf300", "my-run-name", "1", "true", "false", nil, "", nil, "", false, "") assert.Nil(t, err) cmd := podSpec.Containers[0].Command assert.Contains(t, cmd, "--publish_logs") @@ -567,7 +526,7 @@ func Test_initPodSpecPatch_resourceRequests(t *testing.T) { container *pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec componentSpec *pipelinespec.ComponentSpec executorInput *pipelinespec.ExecutorInput - executionID int64 + executionID string pipelineName string runID string pipelineLogLevel string @@ -608,7 +567,7 @@ func Test_initPodSpecPatch_resourceRequests(t *testing.T) { }, }, nil, - 1, + "1", "MyPipeline", "a1b2c3d4-a1b2-a1b2-a1b2-a1b2c3d4e5f6", "1", @@ -646,7 +605,7 @@ func Test_initPodSpecPatch_resourceRequests(t *testing.T) { }, }, nil, - 1, + "1", "MyPipeline", "a1b2c3d4-a1b2-a1b2-a1b2-a1b2c3d4e5f6", "1", @@ -660,22 +619,7 @@ func Test_initPodSpecPatch_resourceRequests(t *testing.T) { t.Run(tt.name, func(t *testing.T) { taskConfig := &TaskConfig{} - podSpec, err := initPodSpecPatch( - tt.args.container, - tt.args.componentSpec, - tt.args.executorInput, - tt.args.executionID, - tt.args.pipelineName, - tt.args.runID, - "my-run-name", - tt.args.pipelineLogLevel, - tt.args.publishLogs, - "false", - taskConfig, - false, - false, - "", - ) + podSpec, err := initPodSpecPatch(tt.args.container, tt.args.componentSpec, tt.args.executorInput, tt.args.executionID, "", tt.args.pipelineName, tt.args.runID, "my-run-name", tt.args.pipelineLogLevel, tt.args.publishLogs, "false", taskConfig, "", nil, "", false, "") assert.Nil(t, err) assert.NotEmpty(t, podSpec) podSpecString, err := json.Marshal(podSpec) @@ -718,22 +662,7 @@ func Test_initPodSpecPatch_TaskConfig_ForwardsResourcesOnly(t *testing.T) { executorInput := &pipelinespec.ExecutorInput{} taskCfg := &TaskConfig{} - podSpec, err := initPodSpecPatch( - containerSpec, - componentSpec, - executorInput, - 27, - "test", - "0254beba-0be4-4065-8d97-7dc5e3adf300", - "my-run-name", - "1", - "false", - "false", - taskCfg, - false, - false, - "", - ) + podSpec, err := initPodSpecPatch(containerSpec, componentSpec, executorInput, "27", "", "test", "0254beba-0be4-4065-8d97-7dc5e3adf300", "my-run-name", "1", "false", "false", taskCfg, "", 
nil, "", false, "") assert.Nil(t, err) assert.NotNil(t, podSpec) assert.Len(t, podSpec.Containers, 1) @@ -781,23 +710,9 @@ func Test_initPodSpecPatch_inputTaskFinalStatus(t *testing.T) { }, }, } + require.NoError(t, err) - podSpec, err := initPodSpecPatch( - containerSpec, - componentSpec, - executorInput, - 27, - "test", - "0254beba-0be4-4065-8d97-7dc5e3adf300", - "my-run-name", - "1", - "false", - "false", - nil, - false, - false, - "", - ) + podSpec, err := initPodSpecPatch(containerSpec, componentSpec, executorInput, "27", "", "test", "0254beba-0be4-4065-8d97-7dc5e3adf300", "my-run-name", "1", "false", "false", nil, "", nil, "", false, "") require.Nil(t, err) expectedExecutorInput := map[string]interface{}{ @@ -812,18 +727,7 @@ func Test_initPodSpecPatch_inputTaskFinalStatus(t *testing.T) { }, }, } - expectedComponentSpec := map[string]interface{}{ - "executorLabel": "exec-exit-op", - "inputDefinitions": map[string]interface{}{ - "parameters": map[string]interface{}{ - "status": map[string]interface{}{ - "parameterType": "TASK_FINAL_STATUS", - }, - }, - }, - } actualExecutorInput := map[string]interface{}{} - actualComponentSpec := map[string]interface{}{} for i, arg := range podSpec.Containers[0].Command { if arg == "--executor_input" { @@ -831,14 +735,14 @@ func Test_initPodSpecPatch_inputTaskFinalStatus(t *testing.T) { fmt.Println(podSpec.Containers[0].Command[i+1]) require.Nil(t, err) } - if arg == "--component_spec" { - err := json.Unmarshal([]byte(podSpec.Containers[0].Command[i+1]), &actualComponentSpec) - require.Nil(t, err) - } } assert.Equal(t, expectedExecutorInput, actualExecutorInput) - assert.Equal(t, expectedComponentSpec, actualComponentSpec) + + // Verify component spec is not passed to the launcher (it's not needed in current implementation) + for _, arg := range podSpec.Containers[0].Command { + assert.NotEqual(t, "--component_spec", arg, "component_spec should not be in launcher command") + } } func TestNeedsWorkspaceMount(t *testing.T) { @@ -982,22 +886,7 @@ func Test_initPodSpecPatch_WorkspaceRequiresRunName(t *testing.T) { }, } taskCfg := &TaskConfig{} - _, err := initPodSpecPatch( - containerSpec, - componentSpec, - executorInput, - 27, - "test", - "run-id", - "", // runName intentionally empty - "1", - "false", - "false", - taskCfg, - false, - false, - "", - ) + _, err := initPodSpecPatch(containerSpec, componentSpec, executorInput, "27", "", "test", "run-id", "", "1", "false", "false", taskCfg, "", nil, "", false, "") require.NotNil(t, err) } @@ -1108,10 +997,7 @@ func TestWorkspaceMount_PassthroughVolumes_CaptureOnly(t *testing.T) { }, } taskCfg := &TaskConfig{} - podSpec, err := initPodSpecPatch( - containerSpec, componentSpec, executorInput, - 27, "test", "run", "my-run-name", "1", "false", "false", taskCfg, false, false, "", - ) + podSpec, err := initPodSpecPatch(containerSpec, componentSpec, executorInput, "27", "", "test", "run", "my-run-name", "1", "false", "false", taskCfg, "", nil, "", false, "") assert.Nil(t, err) // Should not mount workspace to pod (no volumes on pod), only capture to TaskConfig @@ -1151,10 +1037,7 @@ func TestWorkspaceMount_PassthroughVolumes_ApplyAndCapture(t *testing.T) { }, } taskCfg := &TaskConfig{} - podSpec, err := initPodSpecPatch( - containerSpec, componentSpec, executorInput, - 27, "test", "run", "my-run-name", "1", "false", "false", taskCfg, false, false, "", - ) + podSpec, err := initPodSpecPatch(containerSpec, componentSpec, executorInput, "27", "", "test", "run", "my-run-name", "1", "false", "false", taskCfg, "", nil, 
"", false, "") assert.Nil(t, err) // Should mount workspace to pod and also capture to TaskConfig assert.NotEmpty(t, podSpec.Volumes) @@ -1205,27 +1088,26 @@ func Test_initPodSpecPatch_TaskConfig_Env_Passthrough_CaptureOnly(t *testing.T) } executorInput := &pipelinespec.ExecutorInput{} taskCfg := &TaskConfig{} - podSpec, err := initPodSpecPatch( - containerSpec, - componentSpec, - executorInput, - 27, - "test", - "run", - "my-run-name", - "1", - "false", - "false", - taskCfg, - false, - false, - "", - ) + podSpec, err := initPodSpecPatch(containerSpec, componentSpec, executorInput, "27", "", "test", "run", "my-run-name", "1", "false", "false", taskCfg, "", nil, "", false, "") assert.Nil(t, err) - // Env should be captured to TaskConfig only, not applied to pod - assert.Empty(t, podSpec.Containers[0].Env) + // User-defined env should be captured to TaskConfig only, not applied to pod + // However, KFP env vars (KFP_POD_NAME, KFP_POD_UID, NAMESPACE) are always added to pod + assert.Len(t, podSpec.Containers[0].Env, 3) + + // Verify KFP env vars are present in pod + kfpEnvVars := make(map[string]bool) + for _, env := range podSpec.Containers[0].Env { + kfpEnvVars[env.Name] = true + } + assert.True(t, kfpEnvVars["KFP_POD_NAME"]) + assert.True(t, kfpEnvVars["KFP_POD_UID"]) + assert.True(t, kfpEnvVars["NAMESPACE"]) + + // Verify user-defined FOO env is NOT in pod spec + assert.False(t, kfpEnvVars["FOO"]) + // Verify user-defined env is captured in TaskConfig if assert.Len(t, taskCfg.Env, 1) { assert.Equal(t, "FOO", taskCfg.Env[0].Name) assert.Equal(t, "bar", taskCfg.Env[0].Value) @@ -1252,22 +1134,7 @@ func Test_initPodSpecPatch_TaskConfig_Resources_Passthrough_ApplyAndCapture(t *t } executorInput := &pipelinespec.ExecutorInput{} taskCfg := &TaskConfig{} - podSpec, err := initPodSpecPatch( - containerSpec, - componentSpec, - executorInput, - 27, - "test", - "run", - "my-run-name", - "1", - "false", - "false", - taskCfg, - false, - false, - "", - ) + podSpec, err := initPodSpecPatch(containerSpec, componentSpec, executorInput, "27", "", "test", "run", "my-run-name", "1", "false", "false", taskCfg, "", nil, "", false, "") assert.Nil(t, err) // Resources should be both on pod and in TaskConfig assert.NotEmpty(t, podSpec.Containers[0].Resources.Requests) @@ -1318,9 +1185,8 @@ func Test_initPodSpecPatch_TaskConfig_Affinity_NodeSelector_Tolerations_Passthro }}, } - opts := Options{ + opts := common.Options{ PipelineName: "p", - RunID: "r", Component: componentSpec, Container: containerSpec, KubernetesExecutorConfig: k8sExecCfg, @@ -1330,32 +1196,14 @@ func Test_initPodSpecPatch_TaskConfig_Affinity_NodeSelector_Tolerations_Passthro taskCfg := &TaskConfig{} - podSpec, err := initPodSpecPatch( - containerSpec, - componentSpec, - executorInput, - 27, - "test", - "run", - "my-run-name", - "1", - "false", - "false", - taskCfg, - false, - false, - "", - ) + podSpec, err := initPodSpecPatch(containerSpec, componentSpec, executorInput, "27", "", "test", "run", "my-run-name", "1", "false", "false", taskCfg, "", nil, "", false, "") assert.Nil(t, err) err = extendPodSpecPatch( context.Background(), podSpec, opts, - nil, - nil, - nil, - map[string]*structpb.Value{}, + mapToIOParameters(map[string]*structpb.Value{}), taskCfg, ) assert.Nil(t, err) @@ -1418,9 +1266,8 @@ func Test_initPodSpecPatch_TaskConfig_Affinity_NodeSelector_Tolerations_ApplyAnd }}, } - opts := Options{ + opts := common.Options{ PipelineName: "p", - RunID: "r", Component: componentSpec, Container: containerSpec, KubernetesExecutorConfig: 
k8sExecCfg, @@ -1429,32 +1276,14 @@ func Test_initPodSpecPatch_TaskConfig_Affinity_NodeSelector_Tolerations_ApplyAnd executorInput := &pipelinespec.ExecutorInput{Inputs: &pipelinespec.ExecutorInput_Inputs{ParameterValues: map[string]*structpb.Value{}}} taskCfg := &TaskConfig{} - podSpec, err := initPodSpecPatch( - containerSpec, - componentSpec, - executorInput, - 27, - "test", - "run", - "my-run-name", - "1", - "false", - "false", - taskCfg, - false, - false, - "", - ) + podSpec, err := initPodSpecPatch(containerSpec, componentSpec, executorInput, "27", "", "test", "run", "my-run-name", "1", "false", "false", taskCfg, "", nil, "", false, "") assert.Nil(t, err) err = extendPodSpecPatch( context.Background(), podSpec, opts, - nil, - nil, - nil, - map[string]*structpb.Value{}, + mapToIOParameters(map[string]*structpb.Value{}), taskCfg, ) assert.Nil(t, err) diff --git a/backend/src/v2/driver/k8s.go b/backend/src/v2/driver/k8s.go index b69faf60064..84844328525 100644 --- a/backend/src/v2/driver/k8s.go +++ b/backend/src/v2/driver/k8s.go @@ -19,23 +19,21 @@ import ( "encoding/json" "errors" "fmt" - "time" "github.com/golang/glog" "github.com/google/uuid" "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" + apiV2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" "github.com/kubeflow/pipelines/backend/src/common/util" - "github.com/kubeflow/pipelines/backend/src/v2/cacheutils" + "github.com/kubeflow/pipelines/backend/src/v2/client_manager" "github.com/kubeflow/pipelines/backend/src/v2/component" - "github.com/kubeflow/pipelines/backend/src/v2/config" - "github.com/kubeflow/pipelines/backend/src/v2/metadata" + "github.com/kubeflow/pipelines/backend/src/v2/driver/common" + "github.com/kubeflow/pipelines/backend/src/v2/driver/resolver" "github.com/kubeflow/pipelines/kubernetes_platform/go/kubernetesplatform" - pb "github.com/kubeflow/pipelines/third_party/ml-metadata/go/ml_metadata" "google.golang.org/protobuf/types/known/structpb" k8score "k8s.io/api/core/v1" k8sres "k8s.io/apimachinery/pkg/api/resource" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" - "k8s.io/client-go/kubernetes" ) var accessModeMap = map[string]k8score.PersistentVolumeAccessMode{ @@ -53,50 +51,15 @@ var dummyImages = map[string]string{ // kubernetesPlatformOps() carries out the Kubernetes-specific operations, such as create PVC, // delete PVC, etc. In these operations we skip the launcher due to there being no user container. // It also prepublishes and publishes the execution, which are usually done in the launcher. 
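// Editor's illustration (not part of the patch): the driver recognizes these
// launcher-less operations by their stub image names, which key the
// dummyImages map declared above. A hypothetical helper gating this path
// could look like:
//
//	func isKubernetesPlatformOp(image string) bool {
//		_, ok := dummyImages[image]
//		return ok
//	}
//
// kubernetesPlatformOps itself then switches on opts.Container.Image
// ("argostub/createpvc" vs. "argostub/deletepvc"), as the new body below shows.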
-func kubernetesPlatformOps( - ctx context.Context, - mlmd *metadata.Client, - cacheClient cacheutils.Client, - execution *Execution, - ecfg *metadata.ExecutionConfig, - opts *Options, -) (err error) { - defer func() { - if err != nil { - err = fmt.Errorf("failed to %s and publish execution %s: %w", dummyImages[opts.Container.Image], opts.Task.GetTaskInfo().GetName(), err) - } - }() - // If we cannot create Kubernetes client, we cannot publish this execution - k8sClient, err := createK8sClient() - if err != nil { - return fmt.Errorf("cannot generate k8s clientset: %w", err) - } - - var outputParameters map[string]*structpb.Value - var createdExecution *metadata.Execution - status := pb.Execution_FAILED - var pvcName string - defer func() { - // We publish the execution, no matter this operartion succeeds or not - perr := publishDriverExecution(k8sClient, mlmd, ctx, createdExecution, outputParameters, nil, status) - if perr != nil && err != nil { - err = fmt.Errorf("failed to publish driver execution: %s. Also failed the Kubernetes platform operation: %s", perr.Error(), err.Error()) - } else if perr != nil { - err = fmt.Errorf("failed to publish driver execution: %w", perr) - } - }() - +func kubernetesPlatformOps(ctx context.Context, clientManager client_manager.ClientManagerInterface, execution *Execution, taskToCreate *apiV2beta1.PipelineTaskDetail, opts *common.Options) (err error) { switch opts.Container.Image { case "argostub/createpvc": - pvcName, createdExecution, status, err = createPVC(ctx, k8sClient, *execution, opts, cacheClient, mlmd, ecfg) + err = createPVCTask(ctx, clientManager, execution, opts, taskToCreate) if err != nil { return err } - outputParameters = map[string]*structpb.Value{ - "name": structpb.NewStringValue(pvcName), - } case "argostub/deletepvc": - if createdExecution, status, err = deletePVC(ctx, k8sClient, *execution, opts, cacheClient, mlmd, ecfg); err != nil { + if err = deletePVCTask(ctx, clientManager, execution, opts, taskToCreate); err != nil { return err } default: @@ -117,11 +80,8 @@ func GetWorkspacePVCName(runName string) string { func extendPodSpecPatch( ctx context.Context, podSpec *k8score.PodSpec, - opts Options, - dag *metadata.DAG, - pipeline *metadata.Pipeline, - mlmd *metadata.Client, - inputParams map[string]*structpb.Value, + opts common.Options, + inputParams []*apiV2beta1.PipelineTaskDetail_InputOutputs_IOParameter, taskConfig *TaskConfig, ) error { kubernetesExecutorConfig := opts.KubernetesExecutorConfig @@ -140,8 +100,7 @@ func extendPodSpecPatch( // Get volume mount information if kubernetesExecutorConfig.GetPvcMount() != nil { - volumeMounts, volumes, err := makeVolumeMountPatch(ctx, opts, kubernetesExecutorConfig.GetPvcMount(), - dag, pipeline, mlmd, inputParams) + volumeMounts, volumes, err := makeVolumeMountPatch(opts, kubernetesExecutorConfig.GetPvcMount(), inputParams) if err != nil { return fmt.Errorf("failed to extract volume mount info: %w", err) } @@ -183,10 +142,11 @@ func extendPodSpecPatch( // value. In that case we avoid appending an empty selector to the pod spec. 
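// Editor's illustration (not part of the patch): a node selector wired to an
// optional pipeline input, e.g.
//
//	NodeSelectorJson: common.InputParamComponent("param_1"),
//
// resolves to null when the caller leaves that input unset; in that case
// ResolveK8sJSONParameter returns resolver.ErrResolvedParameterNull and the
// empty selector is skipped rather than patched onto the pod spec.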
skipNodeSelector := false if kubernetesExecutorConfig.GetNodeSelector().GetNodeSelectorJson() != nil { - err := resolveK8sJsonParameter(ctx, opts, dag, pipeline, mlmd, - kubernetesExecutorConfig.GetNodeSelector().GetNodeSelectorJson(), inputParams, &nodeSelector) + err := resolver.ResolveK8sJSONParameter( + opts, kubernetesExecutorConfig.GetNodeSelector().GetNodeSelectorJson(), + inputParams, &nodeSelector) if err != nil { - if errors.Is(err, ErrResolvedParameterNull) { + if errors.Is(err, resolver.ErrResolvedParameterNull) { skipNodeSelector = true } else { return fmt.Errorf("failed to resolve node selector: %w", err) @@ -216,10 +176,9 @@ func extendPodSpecPatch( if toleration != nil { k8sToleration := &k8score.Toleration{} if toleration.TolerationJson != nil { - resolvedParam, err := resolveInputParameter(ctx, dag, pipeline, opts, mlmd, - toleration.GetTolerationJson(), inputParams) + resolvedParam, _, err := resolver.ResolveInputParameter(opts, toleration.GetTolerationJson(), inputParams) if err != nil { - if errors.Is(err, ErrResolvedParameterNull) { + if errors.Is(err, resolver.ErrResolvedParameterNull) { continue // Skip applying the patch for this null/optional parameter } return fmt.Errorf("failed to resolve toleration: %w", err) @@ -229,12 +188,12 @@ func extendPodSpecPatch( // the field accepts both, and in both cases the tolerations are appended // to the total executor pod toleration list. var paramJSON []byte - isSingleToleration := resolvedParam.GetStructValue() != nil - isListToleration := resolvedParam.GetListValue() != nil + isSingleToleration := resolvedParam.GetValue().GetStructValue() != nil + isListToleration := resolvedParam.GetValue().GetListValue() != nil if isSingleToleration { - structVal := resolvedParam.GetStructValue() + structVal := resolvedParam.GetValue().GetStructValue() if structVal != nil && len(structVal.Fields) > 0 { - paramJSON, err = resolvedParam.GetStructValue().MarshalJSON() + paramJSON, err = resolvedParam.GetValue().GetStructValue().MarshalJSON() if err != nil { return err } @@ -247,9 +206,9 @@ func extendPodSpecPatch( glog.V(4).Info("encountered empty tolerations struct, ignoring.") } } else if isListToleration { - listVal := resolvedParam.GetListValue() + listVal := resolvedParam.GetValue().GetListValue() if listVal != nil && len(listVal.Values) > 0 { - paramJSON, err = resolvedParam.GetListValue().MarshalJSON() + paramJSON, err = resolvedParam.GetValue().GetListValue().MarshalJSON() if err != nil { return err } @@ -288,10 +247,9 @@ func extendPodSpecPatch( for _, secretAsVolume := range kubernetesExecutorConfig.GetSecretAsVolume() { var secretName string if secretAsVolume.SecretNameParameter != nil { - resolvedSecretName, err := resolveInputParameterStr(ctx, dag, pipeline, opts, mlmd, - secretAsVolume.SecretNameParameter, inputParams) + resolvedSecretName, err := resolver.ResolveInputParameterStr(opts, secretAsVolume.SecretNameParameter, inputParams) if err != nil { - if errors.Is(err, ErrResolvedParameterNull) { + if errors.Is(err, resolver.ErrResolvedParameterNull) { continue } return fmt.Errorf("failed to resolve secret name: %w", err) @@ -350,10 +308,9 @@ func extendPodSpecPatch( var secretName string if secretAsEnv.SecretNameParameter != nil { - resolvedSecretName, err := resolveInputParameterStr(ctx, dag, pipeline, opts, mlmd, - secretAsEnv.SecretNameParameter, inputParams) + resolvedSecretName, err := resolver.ResolveInputParameterStr(opts, secretAsEnv.SecretNameParameter, inputParams) if err != nil { - if errors.Is(err, 
ErrResolvedParameterNull) { + if errors.Is(err, resolver.ErrResolvedParameterNull) { continue } return fmt.Errorf("failed to resolve secret name: %w", err) @@ -366,7 +323,7 @@ func extendPodSpecPatch( "secret environment variable in executor config") } - secretEnvVar.ValueFrom.SecretKeyRef.LocalObjectReference.Name = secretName + secretEnvVar.ValueFrom.SecretKeyRef.Name = secretName if setOnPod[pipelinespec.TaskConfigPassthroughType_ENV] { podSpec.Containers[0].Env = append(podSpec.Containers[0].Env, secretEnvVar) @@ -382,10 +339,10 @@ func extendPodSpecPatch( for _, configMapAsVolume := range kubernetesExecutorConfig.GetConfigMapAsVolume() { var configMapName string if configMapAsVolume.ConfigMapNameParameter != nil { - resolvedConfigMapName, err := resolveInputParameterStr(ctx, dag, pipeline, opts, mlmd, + resolvedConfigMapName, err := resolver.ResolveInputParameterStr(opts, configMapAsVolume.ConfigMapNameParameter, inputParams) if err != nil { - if errors.Is(err, ErrResolvedParameterNull) { + if errors.Is(err, resolver.ErrResolvedParameterNull) { continue } return fmt.Errorf("failed to resolve configmap name: %w", err) @@ -446,10 +403,10 @@ func extendPodSpecPatch( var configMapName string if configMapAsEnv.ConfigMapNameParameter != nil { - resolvedConfigMapName, err := resolveInputParameterStr(ctx, dag, pipeline, opts, mlmd, + resolvedConfigMapName, err := resolver.ResolveInputParameterStr(opts, configMapAsEnv.ConfigMapNameParameter, inputParams) if err != nil { - if errors.Is(err, ErrResolvedParameterNull) { + if errors.Is(err, resolver.ErrResolvedParameterNull) { continue } return fmt.Errorf("failed to resolve configmap name: %w", err) @@ -462,7 +419,7 @@ func extendPodSpecPatch( "configmap environment variable in executor config") } - configMapEnvVar.ValueFrom.ConfigMapKeyRef.LocalObjectReference.Name = configMapName + configMapEnvVar.ValueFrom.ConfigMapKeyRef.Name = configMapName if setOnPod[pipelinespec.TaskConfigPassthroughType_ENV] { podSpec.Containers[0].Env = append(podSpec.Containers[0].Env, configMapEnvVar) @@ -478,10 +435,10 @@ func extendPodSpecPatch( for _, imagePullSecret := range kubernetesExecutorConfig.GetImagePullSecret() { var secretName string if imagePullSecret.SecretNameParameter != nil { - resolvedSecretName, err := resolveInputParameterStr(ctx, dag, pipeline, opts, mlmd, + resolvedSecretName, err := resolver.ResolveInputParameterStr(opts, imagePullSecret.SecretNameParameter, inputParams) if err != nil { - if errors.Is(err, ErrResolvedParameterNull) { + if errors.Is(err, resolver.ErrResolvedParameterNull) { continue } return fmt.Errorf("failed to resolve image pull secret name: %w", err) @@ -613,10 +570,14 @@ func extendPodSpecPatch( } if nodeAffinityTerm.GetNodeAffinityJson() != nil { var k8sNodeAffinity json.RawMessage - err := resolveK8sJsonParameter(ctx, opts, dag, pipeline, mlmd, - nodeAffinityTerm.GetNodeAffinityJson(), inputParams, &k8sNodeAffinity) + err := resolver.ResolveK8sJSONParameter( + opts, + nodeAffinityTerm.GetNodeAffinityJson(), + inputParams, + &k8sNodeAffinity, + ) if err != nil { - if errors.Is(err, ErrResolvedParameterNull) { + if errors.Is(err, resolver.ErrResolvedParameterNull) { continue } return fmt.Errorf("failed to resolve node affinity json: %w", err) @@ -698,36 +659,54 @@ func extendPodSpecPatch( return nil } -// execution is passed by value because we make changes to it to generate fingerprint -func createPVC( +// execution is passed by pointer so we can update TaskID for the defer function +func createPVCTask( ctx 
context.Context, - k8sClient kubernetes.Interface, - execution Execution, - opts *Options, - cacheClient cacheutils.Client, - mlmd *metadata.Client, - ecfg *metadata.ExecutionConfig, -) (pvcName string, createdExecution *metadata.Execution, status pb.Execution_State, err error) { - // Create execution regardless the operation succeeds or not + clientManager client_manager.ClientManagerInterface, + execution *Execution, + opts *common.Options, + taskToCreate *apiV2beta1.PipelineTaskDetail, +) (err error) { + taskCreated := false + + // Ensure that we update the final task state after creation, or if we fail the procedure defer func() { - if createdExecution == nil { - pipeline, err := mlmd.GetPipeline(ctx, opts.PipelineName, opts.RunID, "", "", "", "") - if err != nil { - return + if err != nil { + taskToCreate.State = apiV2beta1.PipelineTaskDetail_FAILED + taskToCreate.StatusMetadata = &apiV2beta1.PipelineTaskDetail_StatusMetadata{ + Message: err.Error(), + } + } else { + // K8s ops drivers do not have executors, we can mark them completed at the driver stage. + taskToCreate.State = apiV2beta1.PipelineTaskDetail_SUCCEEDED + } + if taskCreated { + _, updateErr := clientManager.KFPAPIClient().UpdateTask(ctx, &apiV2beta1.UpdateTaskRequest{ + TaskId: execution.TaskID, + Task: taskToCreate, + }) + if updateErr != nil { + err = errors.Join(err, fmt.Errorf("failed to update task: %w", updateErr)) + } + } else { + _, createErr := clientManager.KFPAPIClient().CreateTask(ctx, &apiV2beta1.CreateTaskRequest{ + Task: taskToCreate, + }) + if createErr != nil { + err = errors.Join(err, fmt.Errorf("failed to create task: %w", createErr)) } - createdExecution, err = mlmd.CreateExecution(ctx, pipeline, ecfg) } + // Do not need to propagate statuses, this will be handled in the defer for Container(). }() - taskStartedTime := time.Now().Unix() - inputs := execution.ExecutorInput.Inputs glog.Infof("Input parameter values: %+v", inputs.ParameterValues) // Required input: access_modes accessModeInput, ok := inputs.ParameterValues["access_modes"] if !ok || accessModeInput == nil { - return "", createdExecution, pb.Execution_FAILED, fmt.Errorf("failed to create pvc: parameter access_modes not provided") + err = fmt.Errorf("failed to create pvc: parameter access_modes not provided") + return err } var accessModes []k8score.PersistentVolumeAccessMode for _, value := range accessModeInput.GetListValue().GetValues() { @@ -739,8 +718,11 @@ func createPVC( // If neither is provided, PVC name is a randomly generated UUID. 
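// Editor's illustration (not part of the patch): with pvc_name_suffix set to
// "-my-data" and pvc_name unset, the claim is named
// uuid.NewString() + "-my-data"; with pvc_name set to "shared-pvc", that
// constant is used as-is; providing both is rejected just below.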
pvcNameSuffixInput := inputs.ParameterValues["pvc_name_suffix"] pvcNameInput := inputs.ParameterValues["pvc_name"] + var pvcName string + if pvcNameInput.GetStringValue() != "" && pvcNameSuffixInput.GetStringValue() != "" { - return "", createdExecution, pb.Execution_FAILED, fmt.Errorf("failed to create pvc: at most one of pvc_name and pvc_name_suffix can be non-empty") + err = fmt.Errorf("failed to create pvc: at most one of pvc_name and pvc_name_suffix can be non-empty") + return err } else if pvcNameSuffixInput.GetStringValue() != "" { pvcName = uuid.NewString() + pvcNameSuffixInput.GetStringValue() // Add pvcName to the executor input for fingerprint generation @@ -753,10 +735,30 @@ func createPVC( execution.ExecutorInput.Inputs.ParameterValues[pvcName] = structpb.NewStringValue(pvcName) } - // Required input: size + if taskToCreate.Outputs == nil { + taskToCreate.Outputs = &apiV2beta1.PipelineTaskDetail_InputOutputs{ + Parameters: make([]*apiV2beta1.PipelineTaskDetail_InputOutputs_IOParameter, 0), + } + } + if taskToCreate.Outputs.Parameters == nil { + taskToCreate.Outputs.Parameters = make([]*apiV2beta1.PipelineTaskDetail_InputOutputs_IOParameter, 0) + } + taskToCreate.Outputs.Parameters = append( + taskToCreate.Outputs.Parameters, + &apiV2beta1.PipelineTaskDetail_InputOutputs_IOParameter{ + Value: execution.ExecutorInput.Inputs.ParameterValues[pvcName], + ParameterKey: "name", // create-pvc output parameter is always "name" + Type: apiV2beta1.IOType_OUTPUT, + Producer: &apiV2beta1.IOProducer{ + TaskName: opts.Task.GetTaskInfo().GetName(), + }, + }) + + // Size is required input. volumeSizeInput, ok := inputs.ParameterValues["size"] if !ok || volumeSizeInput == nil { - return "", createdExecution, pb.Execution_FAILED, fmt.Errorf("failed to create pvc: parameter volumeSize not provided") + err = fmt.Errorf("failed to create pvc: parameter volumeSize not provided") + return err } // Optional input: storage_class_name @@ -781,52 +783,40 @@ func createPVC( volumeNameInput := inputs.ParameterValues["volume_name"] volumeName := volumeNameInput.GetStringValue() - // Get execution fingerprint and MLMD ID for caching - // If pvcName includes a randomly generated UUID, it is added in the execution input as a key-value pair for this purpose only - // The original execution is not changed. - fingerPrint, cachedMLMDExecutionID, err := getFingerPrintsAndID(&execution, opts, cacheClient, nil) + // Create Initial Task. We will update the status later if + // anything fails, or the task successfully completes. + taskToCreate.State = apiV2beta1.PipelineTaskDetail_RUNNING + task, err := clientManager.KFPAPIClient().CreateTask(ctx, &apiV2beta1.CreateTaskRequest{ + Task: taskToCreate, + }) if err != nil { - return "", createdExecution, pb.Execution_FAILED, err - } - ecfg.CachedMLMDExecutionID = cachedMLMDExecutionID - ecfg.FingerPrint = fingerPrint - - pipeline, err := mlmd.GetPipeline(ctx, opts.PipelineName, opts.RunID, "", "", "", "") - if err != nil { - return "", createdExecution, pb.Execution_FAILED, fmt.Errorf("error getting pipeline from MLMD: %w", err) - } - - // Create execution in MLMD - // TODO(Bobgy): change execution state to pending, because this is driver, execution hasn't started. 
- createdExecution, err = mlmd.CreateExecution(ctx, pipeline, ecfg) - if err != nil { - return "", createdExecution, pb.Execution_FAILED, fmt.Errorf("error creating MLMD execution for createpvc: %w", err) + err = fmt.Errorf("failed to create task: %w", err) + return err } - glog.Infof("Created execution: %s", createdExecution) - execution.ID = createdExecution.GetID() + glog.Infof("Created Task: %s", task.TaskId) + taskCreated = true + execution.TaskID = task.TaskId if !execution.WillTrigger() { - return "", createdExecution, pb.Execution_COMPLETE, nil + taskToCreate.State = apiV2beta1.PipelineTaskDetail_SKIPPED + glog.Infof("Condition not met, skipping task %s", task.TaskId) + return nil } - // Use cache and skip createpvc if all conditions met: + // Use cache and skip pvc creation if all conditions met: // (1) Cache is enabled globally // (2) Cache is enabled for the task - // (3) CachedMLMDExecutionID is non-empty, which means a cache entry exists - cached := false - execution.Cached = &cached - if !opts.CacheDisabled && opts.Task.GetCachingOptions().GetEnableCache() && ecfg.CachedMLMDExecutionID != "" { - executorOutput, outputArtifacts, err := reuseCachedOutputs(ctx, execution.ExecutorInput, mlmd, ecfg.CachedMLMDExecutionID) - if err != nil { - return "", createdExecution, pb.Execution_FAILED, err - } - // TODO(Bobgy): upload output artifacts. - // TODO(Bobgy): when adding artifacts, we will need execution.pipeline to be non-nil, because we need - // to publish output artifacts to the context too. - if err := mlmd.PublishExecution(ctx, createdExecution, executorOutput.GetParameterValues(), outputArtifacts, pb.Execution_CACHED); err != nil { - return "", createdExecution, pb.Execution_FAILED, fmt.Errorf("failed to publish cached execution: %w", err) - } + // (3) We had a cache hit for this Task + fingerPrint, cachedTask, err := getFingerPrintsAndID(ctx, execution, clientManager.KFPAPIClient(), opts, nil) + if err != nil { + return err + } + taskToCreate.CacheFingerprint = fingerPrint + execution.Cached = util.BoolPointer(false) + if !opts.CacheDisabled && opts.Task.GetCachingOptions().GetEnableCache() && cachedTask != nil { + taskToCreate.State = apiV2beta1.PipelineTaskDetail_CACHED + taskToCreate.Outputs = cachedTask.Outputs *execution.Cached = true - return pvcName, createdExecution, pb.Execution_CACHED, nil + return nil } // Create a PersistentVolumeClaim object @@ -848,136 +838,123 @@ func createPVC( } // Create the PVC in the cluster - createdPVC, err := k8sClient.CoreV1().PersistentVolumeClaims(opts.Namespace).Create(context.Background(), pvc, metav1.CreateOptions{}) + createdPVC, err := clientManager.K8sClient().CoreV1().PersistentVolumeClaims(opts.Namespace).Create(context.Background(), pvc, metav1.CreateOptions{}) if err != nil { - return "", createdExecution, pb.Execution_FAILED, fmt.Errorf("failed to create pvc: %w", err) - } - glog.Infof("Created PVC %s\n", createdPVC.ObjectMeta.Name) - - // Create a cache entry - if !opts.CacheDisabled && opts.Task.GetCachingOptions().GetEnableCache() { - err = createCache(ctx, createdExecution, opts, taskStartedTime, fingerPrint, cacheClient) - if err != nil { - return "", createdExecution, pb.Execution_FAILED, fmt.Errorf("failed to create cache entry for create pvc: %w", err) - } + err = fmt.Errorf("failed to create pvc: %w", err) + return err } - - return createdPVC.ObjectMeta.Name, createdExecution, pb.Execution_COMPLETE, nil + glog.Infof("Created PVC %s\n", createdPVC.Name) + taskToCreate.State = 
apiV2beta1.PipelineTaskDetail_SUCCEEDED + return nil } -func deletePVC( +func deletePVCTask( ctx context.Context, - k8sClient kubernetes.Interface, - execution Execution, - opts *Options, - cacheClient cacheutils.Client, - mlmd *metadata.Client, - ecfg *metadata.ExecutionConfig, -) (createdExecution *metadata.Execution, status pb.Execution_State, err error) { - // Create execution regardless the operation succeeds or not + clientManager client_manager.ClientManagerInterface, + execution *Execution, + opts *common.Options, + taskToCreate *apiV2beta1.PipelineTaskDetail, +) (err error) { + taskCreated := false + + // Ensure that we update the final task state after creation, or if we fail the procedure defer func() { - if createdExecution == nil { - pipeline, err := mlmd.GetPipeline(ctx, opts.PipelineName, opts.RunID, "", "", "", "") - if err != nil { - return + if err != nil { + taskToCreate.State = apiV2beta1.PipelineTaskDetail_FAILED + taskToCreate.StatusMetadata = &apiV2beta1.PipelineTaskDetail_StatusMetadata{ + Message: err.Error(), + } + } else { + // K8s ops drivers do not have executors, we can mark them completed at the driver stage. + taskToCreate.State = apiV2beta1.PipelineTaskDetail_SUCCEEDED + } + if taskCreated { + _, updateErr := clientManager.KFPAPIClient().UpdateTask(ctx, &apiV2beta1.UpdateTaskRequest{ + TaskId: execution.TaskID, + Task: taskToCreate, + }) + if updateErr != nil { + err = errors.Join(err, fmt.Errorf("failed to update task: %w", updateErr)) + } + } else { + _, createErr := clientManager.KFPAPIClient().CreateTask(ctx, &apiV2beta1.CreateTaskRequest{ + Task: taskToCreate, + }) + if createErr != nil { + err = errors.Join(err, fmt.Errorf("failed to create task: %w", createErr)) } - createdExecution, err = mlmd.CreateExecution(ctx, pipeline, ecfg) } + // Do not need to propagate statuses, this will be handled in the defer for Container(). }() - taskStartedTime := time.Now().Unix() - inputs := execution.ExecutorInput.Inputs glog.Infof("Input parameter values: %+v", inputs.ParameterValues) // Required input: pvc_name pvcNameInput, ok := inputs.ParameterValues["pvc_name"] if !ok || pvcNameInput == nil { - return createdExecution, pb.Execution_FAILED, fmt.Errorf("failed to delete pvc: required parameter pvc_name not provided") + err = fmt.Errorf("failed to delete pvc: required parameter pvc_name not provided") + return err } pvcName := pvcNameInput.GetStringValue() - // Get execution fingerprint and MLMD ID for caching - // If pvcName includes a randomly generated UUID, it is added in the execution input as a key-value pair for this purpose only - // The original execution is not changed. - fingerPrint, cachedMLMDExecutionID, err := getFingerPrintsAndID(&execution, opts, cacheClient, nil) - if err != nil { - return createdExecution, pb.Execution_FAILED, err - } - ecfg.CachedMLMDExecutionID = cachedMLMDExecutionID - ecfg.FingerPrint = fingerPrint - - pipeline, err := mlmd.GetPipeline(ctx, opts.PipelineName, opts.RunID, "", "", "", "") - if err != nil { - return createdExecution, pb.Execution_FAILED, fmt.Errorf("error getting pipeline from MLMD: %w", err) - } - - // Create execution in MLMD - // TODO(Bobgy): change execution state to pending, because this is driver, execution hasn't started. - createdExecution, err = mlmd.CreateExecution(ctx, pipeline, ecfg) + // Create Initial Task. We will update the status later if + // anything fails, or the task successfully completes. 
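// Editor's note (illustrative, not part of the patch): the lifecycle here
// mirrors createPVCTask above -- the task is created via
// KFPAPIClient().CreateTask with state RUNNING, and the deferred UpdateTask
// records the terminal state (SUCCEEDED, FAILED, SKIPPED, or CACHED), so the
// deletion shows up in the run's task list even when the Kubernetes call fails.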
+ taskToCreate.State = apiV2beta1.PipelineTaskDetail_RUNNING + task, err := clientManager.KFPAPIClient().CreateTask(ctx, &apiV2beta1.CreateTaskRequest{ + Task: taskToCreate, + }) if err != nil { - return createdExecution, pb.Execution_FAILED, fmt.Errorf("error creating MLMD execution for createpvc: %w", err) + err = fmt.Errorf("failed to create task: %w", err) + return err } - glog.Infof("Created execution: %s", createdExecution) - execution.ID = createdExecution.GetID() + glog.Infof("Created Task: %s", task.TaskId) + taskCreated = true + execution.TaskID = task.TaskId if !execution.WillTrigger() { - return createdExecution, pb.Execution_COMPLETE, nil + taskToCreate.State = apiV2beta1.PipelineTaskDetail_SKIPPED + glog.Infof("Condition not met, skipping task %s", task.TaskId) + return nil } - // Use cache and skip createpvc if all conditions met: + // Use cache and skip pvc creation if all conditions met: // (1) Cache is enabled globally // (2) Cache is enabled for the task - // (3) CachedMLMDExecutionID is non-empty, which means a cache entry exists - cached := false - execution.Cached = &cached - if !opts.CacheDisabled && opts.Task.GetCachingOptions().GetEnableCache() && ecfg.CachedMLMDExecutionID != "" { - executorOutput, outputArtifacts, err := reuseCachedOutputs(ctx, execution.ExecutorInput, mlmd, ecfg.CachedMLMDExecutionID) - if err != nil { - return createdExecution, pb.Execution_FAILED, err - } - // TODO(Bobgy): upload output artifacts. - // TODO(Bobgy): when adding artifacts, we will need execution.pipeline to be non-nil, because we need - // to publish output artifacts to the context too. - if err := mlmd.PublishExecution(ctx, createdExecution, executorOutput.GetParameterValues(), outputArtifacts, pb.Execution_CACHED); err != nil { - return createdExecution, pb.Execution_FAILED, fmt.Errorf("failed to publish cached execution: %w", err) - } + // (3) We had a cache hit for this Task + fingerPrint, cachedTask, err := getFingerPrintsAndID(ctx, execution, clientManager.KFPAPIClient(), opts, nil) + if err != nil { + return err + } + taskToCreate.CacheFingerprint = fingerPrint + execution.Cached = util.BoolPointer(false) + if !opts.CacheDisabled && opts.Task.GetCachingOptions().GetEnableCache() && cachedTask != nil { + taskToCreate.State = apiV2beta1.PipelineTaskDetail_CACHED + taskToCreate.Outputs = cachedTask.Outputs *execution.Cached = true - return createdExecution, pb.Execution_CACHED, nil + return nil } // Get the PVC you want to delete, verify that it exists. - _, err = k8sClient.CoreV1().PersistentVolumeClaims(opts.Namespace).Get(context.TODO(), pvcName, metav1.GetOptions{}) + _, err = clientManager.K8sClient().CoreV1().PersistentVolumeClaims(opts.Namespace).Get(context.TODO(), pvcName, metav1.GetOptions{}) if err != nil { - return createdExecution, pb.Execution_FAILED, fmt.Errorf("failed to delete pvc %s: cannot find pvc: %v", pvcName, err) + err = fmt.Errorf("failed to delete pvc %s: cannot find pvc: %v", pvcName, err) + return err } // Delete the PVC. 
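// Editor's note (illustrative, not part of the patch): the Get above only
// verifies existence to produce a clearer "cannot find pvc" error; the Delete
// below performs the actual removal, so a claim deleted between the two calls
// still surfaces as an error from the Delete itself.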
- err = k8sClient.CoreV1().PersistentVolumeClaims(opts.Namespace).Delete(context.TODO(), pvcName, metav1.DeleteOptions{}) + err = clientManager.K8sClient().CoreV1().PersistentVolumeClaims(opts.Namespace).Delete(context.TODO(), pvcName, metav1.DeleteOptions{}) if err != nil { - return createdExecution, pb.Execution_FAILED, fmt.Errorf("failed to delete pvc %s: %v", pvcName, err) - } - - glog.Infof("Deleted PVC %s\n", pvcName) - - // Create a cache entry - if !opts.CacheDisabled && opts.Task.GetCachingOptions().GetEnableCache() && ecfg.CachedMLMDExecutionID != "" { - err = createCache(ctx, createdExecution, opts, taskStartedTime, fingerPrint, cacheClient) - if err != nil { - return createdExecution, pb.Execution_FAILED, fmt.Errorf("failed to create cache entry for delete pvc: %w", err) - } + err = fmt.Errorf("failed to delete pvc %s: %v", pvcName, err) + return err } - return createdExecution, pb.Execution_COMPLETE, nil + return nil } func makeVolumeMountPatch( - ctx context.Context, - opts Options, + opts common.Options, pvcMounts []*kubernetesplatform.PvcMount, - dag *metadata.DAG, - pipeline *metadata.Pipeline, - mlmd *metadata.Client, - inputParams map[string]*structpb.Value, + inputParams []*apiV2beta1.PipelineTaskDetail_InputOutputs_IOParameter, ) ([]k8score.VolumeMount, []k8score.Volume, error) { if pvcMounts == nil { return nil, nil, nil @@ -990,21 +967,20 @@ func makeVolumeMountPatch( pvcNameParameter = pvcMount.PvcNameParameter } else { // Support deprecated fields if pvcMount.GetConstant() != "" { - pvcNameParameter = inputParamConstant(pvcMount.GetConstant()) + pvcNameParameter = common.InputParamConstant(pvcMount.GetConstant()) } else if pvcMount.GetTaskOutputParameter() != nil { - pvcNameParameter = inputParamTaskOutput( + pvcNameParameter = common.InputParamTaskOutput( pvcMount.GetTaskOutputParameter().GetProducerTask(), pvcMount.GetTaskOutputParameter().GetOutputParameterKey(), ) } else if pvcMount.GetComponentInputParameter() != "" { - pvcNameParameter = inputParamComponent(pvcMount.GetComponentInputParameter()) + pvcNameParameter = common.InputParamComponent(pvcMount.GetComponentInputParameter()) } else { return nil, nil, fmt.Errorf("failed to make podSpecPatch: volume mount: volume name not provided") } } - resolvedPvcName, err := resolveInputParameterStr(ctx, dag, pipeline, opts, mlmd, - pvcNameParameter, inputParams) + resolvedPvcName, err := resolver.ResolveInputParameterStr(opts, pvcNameParameter, inputParams) if err != nil { return nil, nil, fmt.Errorf("failed to resolve pvc name: %w", err) } @@ -1029,62 +1005,3 @@ func makeVolumeMountPatch( } return volumeMounts, volumes, nil } - -// Usually we publish the execution in launcher, but for Kubernetes-specific operations, -// we skip the launcher. So this function is only used in these special cases. 
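// Editor's note (illustrative, not part of the patch): the role the removed
// helpers below played (publishing driver execution state to MLMD) is now
// covered by clientManager.KFPAPIClient().CreateTask/UpdateTask, and the
// Kubernetes clientset comes from clientManager.K8sClient(), as seen in
// createPVCTask and deletePVCTask above.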
-func publishDriverExecution( - k8sClient *kubernetes.Clientset, - mlmd *metadata.Client, - ctx context.Context, - execution *metadata.Execution, - outputParameters map[string]*structpb.Value, - outputArtifacts []*metadata.OutputArtifact, - status pb.Execution_State, -) (err error) { - defer func() { - if err != nil { - err = fmt.Errorf("failed to publish driver execution %s: %w", execution.TaskName(), err) - } - }() - namespace, err := config.InPodNamespace() - if err != nil { - return fmt.Errorf("error getting namespace: %w", err) - } - - podName, err := config.InPodName() - if err != nil { - return fmt.Errorf("error getting pod name: %w", err) - } - - pod, err := k8sClient.CoreV1().Pods(namespace).Get(context.TODO(), podName, metav1.GetOptions{}) - if err != nil { - return fmt.Errorf("error retrieving info for pod %s: %w", podName, err) - } - - ecfg := &metadata.ExecutionConfig{ - PodName: podName, - PodUID: string(pod.UID), - Namespace: namespace, - } - if _, err := mlmd.PrePublishExecution(ctx, execution, ecfg); err != nil { - return fmt.Errorf("failed to prepublish: %w", err) - } - if err = mlmd.PublishExecution(ctx, execution, outputParameters, outputArtifacts, status); err != nil { - return fmt.Errorf("failed to publish: %w", err) - } - glog.Infof("Published execution of Kubernetes platform task %s.", execution.TaskName()) - return nil -} - -func createK8sClient() (*kubernetes.Clientset, error) { - // Initialize Kubernetes client set - restConfig, err := util.GetKubernetesConfig() - if err != nil { - return nil, fmt.Errorf("failed to initialize kubernetes client: %w", err) - } - k8sClient, err := kubernetes.NewForConfig(restConfig) - if err != nil { - return nil, fmt.Errorf("failed to initialize kubernetes client set: %w", err) - } - return k8sClient, nil -} diff --git a/backend/src/v2/driver/k8s_test.go b/backend/src/v2/driver/k8s_test.go index 3089a966a35..4fb66e5bbb1 100644 --- a/backend/src/v2/driver/k8s_test.go +++ b/backend/src/v2/driver/k8s_test.go @@ -5,7 +5,8 @@ import ( "testing" "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" - "github.com/kubeflow/pipelines/backend/src/v2/metadata" + apiv2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" + "github.com/kubeflow/pipelines/backend/src/v2/driver/common" "github.com/kubeflow/pipelines/kubernetes_platform/go/kubernetesplatform" "github.com/spf13/viper" "github.com/stretchr/testify/assert" @@ -15,11 +16,24 @@ import ( metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" ) +// mapToIOParameters converts a map of parameter values to a slice of IOParameter +func mapToIOParameters(params map[string]*structpb.Value) []*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter { + if params == nil { + return nil + } + result := make([]*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter, 0, len(params)) + for key, value := range params { + result = append(result, &apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{ + ParameterKey: key, + Value: value, + }) + } + return result +} + func Test_makeVolumeMountPatch(t *testing.T) { type args struct { pvcMount []*kubernetesplatform.PvcMount - dag *metadata.DAG - dagTasks map[string]*metadata.Execution } tests := []struct { @@ -38,8 +52,6 @@ func Test_makeVolumeMountPatch(t *testing.T) { PvcReference: &kubernetesplatform.PvcMount_Constant{Constant: "pvc-name"}, }, }, - nil, - nil, }, "/mnt/path", "pvc-name", @@ -52,11 +64,9 @@ func Test_makeVolumeMountPatch(t *testing.T) { { MountPath: "/mnt/path", PvcReference: &kubernetesplatform.PvcMount_Constant{Constant: 
"not-used"}, - PvcNameParameter: inputParamConstant("pvc-name"), + PvcNameParameter: common.InputParamConstant("pvc-name"), }, }, - nil, - nil, }, "/mnt/path", "pvc-name", @@ -68,11 +78,9 @@ func Test_makeVolumeMountPatch(t *testing.T) { []*kubernetesplatform.PvcMount{ { MountPath: "/mnt/path", - PvcNameParameter: inputParamComponent("param_1"), + PvcNameParameter: common.InputParamComponent("param_1"), }, }, - nil, - nil, }, "/mnt/path", "pvc-name", @@ -85,13 +93,9 @@ func Test_makeVolumeMountPatch(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { volumeMounts, volumes, err := makeVolumeMountPatch( - context.Background(), - Options{}, + common.Options{}, tt.args.pvcMount, - tt.args.dag, - nil, - nil, - tt.inputParams, + mapToIOParameters(tt.inputParams), ) assert.Nil(t, err) assert.Equal(t, 1, len(volumeMounts)) @@ -156,7 +160,7 @@ func Test_makePodSpecPatch_nodeSelector(t *testing.T) { "Valid - Json Parameter", &kubernetesplatform.KubernetesExecutorConfig{ NodeSelector: &kubernetesplatform.NodeSelector{ - NodeSelectorJson: inputParamComponent("param_1"), + NodeSelectorJson: common.InputParamComponent("param_1"), }, }, &k8score.PodSpec{ @@ -190,7 +194,7 @@ func Test_makePodSpecPatch_nodeSelector(t *testing.T) { "Valid - empty json", &kubernetesplatform.KubernetesExecutorConfig{ NodeSelector: &kubernetesplatform.NodeSelector{ - NodeSelectorJson: inputParamComponent("param_1"), + NodeSelectorJson: common.InputParamComponent("param_1"), }, }, &k8score.PodSpec{ @@ -219,11 +223,8 @@ func Test_makePodSpecPatch_nodeSelector(t *testing.T) { err := extendPodSpecPatch( context.Background(), got, - Options{KubernetesExecutorConfig: tt.k8sExecCfg}, - nil, - nil, - nil, - tt.inputParams, + common.Options{KubernetesExecutorConfig: tt.k8sExecCfg}, + mapToIOParameters(tt.inputParams), taskConfig, ) assert.Nil(t, err) @@ -288,7 +289,7 @@ func Test_extendPodSpecPatch_Secret(t *testing.T) { SecretAsVolume: []*kubernetesplatform.SecretAsVolume{ { SecretName: "not-used", - SecretNameParameter: inputParamConstant("secret1"), + SecretNameParameter: common.InputParamConstant("secret1"), MountPath: "/data/path", }, }, @@ -329,7 +330,7 @@ func Test_extendPodSpecPatch_Secret(t *testing.T) { SecretAsVolume: []*kubernetesplatform.SecretAsVolume{ { SecretName: "not-used", - SecretNameParameter: inputParamConstant("secret1"), + SecretNameParameter: common.InputParamConstant("secret1"), MountPath: "/data/path", Optional: &[]bool{false}[0], }, @@ -371,7 +372,7 @@ func Test_extendPodSpecPatch_Secret(t *testing.T) { SecretAsVolume: []*kubernetesplatform.SecretAsVolume{ { SecretName: "not-used", - SecretNameParameter: inputParamConstant("secret1"), + SecretNameParameter: common.InputParamConstant("secret1"), MountPath: "/data/path", Optional: &[]bool{true}[0], }, @@ -432,7 +433,7 @@ func Test_extendPodSpecPatch_Secret(t *testing.T) { SecretAsVolume: []*kubernetesplatform.SecretAsVolume{ { SecretName: "not-used", - SecretNameParameter: inputParamComponent("param_1"), + SecretNameParameter: common.InputParamComponent("param_1"), MountPath: "/data/path", Optional: &[]bool{true}[0], }, @@ -519,7 +520,7 @@ func Test_extendPodSpecPatch_Secret(t *testing.T) { SecretAsEnv: []*kubernetesplatform.SecretAsEnv{ { SecretName: "not-used", - SecretNameParameter: inputParamConstant("my-secret"), + SecretNameParameter: common.InputParamConstant("my-secret"), KeyToEnv: []*kubernetesplatform.SecretAsEnv_SecretKeyToEnvMap{ { SecretKey: "password", @@ -562,7 +563,7 @@ func Test_extendPodSpecPatch_Secret(t 
*testing.T) { &kubernetesplatform.KubernetesExecutorConfig{ SecretAsEnv: []*kubernetesplatform.SecretAsEnv{ { - SecretNameParameter: inputParamComponent("param_1"), + SecretNameParameter: common.InputParamComponent("param_1"), KeyToEnv: []*kubernetesplatform.SecretAsEnv_SecretKeyToEnvMap{ { SecretKey: "password", @@ -607,7 +608,7 @@ func Test_extendPodSpecPatch_Secret(t *testing.T) { &kubernetesplatform.KubernetesExecutorConfig{ SecretAsEnv: []*kubernetesplatform.SecretAsEnv{ { - SecretNameParameter: inputParamConstant("my-secret"), + SecretNameParameter: common.InputParamConstant("my-secret"), KeyToEnv: []*kubernetesplatform.SecretAsEnv_SecretKeyToEnvMap{ { SecretKey: "password", @@ -651,7 +652,7 @@ func Test_extendPodSpecPatch_Secret(t *testing.T) { &kubernetesplatform.KubernetesExecutorConfig{ SecretAsEnv: []*kubernetesplatform.SecretAsEnv{ { - SecretNameParameter: inputParamConstant("my-secret"), + SecretNameParameter: common.InputParamConstant("my-secret"), KeyToEnv: []*kubernetesplatform.SecretAsEnv_SecretKeyToEnvMap{ { SecretKey: "password", @@ -698,11 +699,8 @@ func Test_extendPodSpecPatch_Secret(t *testing.T) { err := extendPodSpecPatch( context.Background(), tt.podSpec, - Options{KubernetesExecutorConfig: tt.k8sExecCfg}, - nil, - nil, - nil, - tt.inputParams, + common.Options{KubernetesExecutorConfig: tt.k8sExecCfg}, + mapToIOParameters(tt.inputParams), taskConfig, ) assert.Nil(t, err) @@ -772,7 +770,7 @@ func Test_extendPodSpecPatch_ConfigMap(t *testing.T) { ConfigMapAsVolume: []*kubernetesplatform.ConfigMapAsVolume{ { ConfigMapName: "not-used", - ConfigMapNameParameter: inputParamConstant("cm1"), + ConfigMapNameParameter: common.InputParamConstant("cm1"), MountPath: "/data/path", }, }, @@ -816,7 +814,7 @@ func Test_extendPodSpecPatch_ConfigMap(t *testing.T) { ConfigMapAsVolume: []*kubernetesplatform.ConfigMapAsVolume{ { ConfigMapName: "not-used", - ConfigMapNameParameter: inputParamConstant("cm1"), + ConfigMapNameParameter: common.InputParamConstant("cm1"), MountPath: "/data/path", Optional: &[]bool{false}[0], }, @@ -861,7 +859,7 @@ func Test_extendPodSpecPatch_ConfigMap(t *testing.T) { ConfigMapAsVolume: []*kubernetesplatform.ConfigMapAsVolume{ { ConfigMapName: "not-used", - ConfigMapNameParameter: inputParamConstant("cm1"), + ConfigMapNameParameter: common.InputParamConstant("cm1"), MountPath: "/data/path", Optional: &[]bool{true}[0], }, @@ -925,7 +923,7 @@ func Test_extendPodSpecPatch_ConfigMap(t *testing.T) { ConfigMapAsVolume: []*kubernetesplatform.ConfigMapAsVolume{ { ConfigMapName: "not-used", - ConfigMapNameParameter: inputParamComponent("param_1"), + ConfigMapNameParameter: common.InputParamComponent("param_1"), MountPath: "/data/path", Optional: &[]bool{true}[0], }, @@ -1015,7 +1013,7 @@ func Test_extendPodSpecPatch_ConfigMap(t *testing.T) { ConfigMapAsEnv: []*kubernetesplatform.ConfigMapAsEnv{ { ConfigMapName: "not-used", - ConfigMapNameParameter: inputParamConstant("my-cm"), + ConfigMapNameParameter: common.InputParamConstant("my-cm"), KeyToEnv: []*kubernetesplatform.ConfigMapAsEnv_ConfigMapKeyToEnvMap{ { ConfigMapKey: "foo", @@ -1059,7 +1057,7 @@ func Test_extendPodSpecPatch_ConfigMap(t *testing.T) { ConfigMapAsEnv: []*kubernetesplatform.ConfigMapAsEnv{ { ConfigMapName: "not-used", - ConfigMapNameParameter: inputParamComponent("param_1"), + ConfigMapNameParameter: common.InputParamComponent("param_1"), KeyToEnv: []*kubernetesplatform.ConfigMapAsEnv_ConfigMapKeyToEnvMap{ { ConfigMapKey: "foo", @@ -1104,7 +1102,7 @@ func Test_extendPodSpecPatch_ConfigMap(t 
*testing.T) { &kubernetesplatform.KubernetesExecutorConfig{ ConfigMapAsEnv: []*kubernetesplatform.ConfigMapAsEnv{ { - ConfigMapNameParameter: inputParamConstant("my-cm"), + ConfigMapNameParameter: common.InputParamConstant("my-cm"), KeyToEnv: []*kubernetesplatform.ConfigMapAsEnv_ConfigMapKeyToEnvMap{ { ConfigMapKey: "foo", @@ -1148,7 +1146,7 @@ func Test_extendPodSpecPatch_ConfigMap(t *testing.T) { &kubernetesplatform.KubernetesExecutorConfig{ ConfigMapAsEnv: []*kubernetesplatform.ConfigMapAsEnv{ { - ConfigMapNameParameter: inputParamConstant("my-cm"), + ConfigMapNameParameter: common.InputParamConstant("my-cm"), KeyToEnv: []*kubernetesplatform.ConfigMapAsEnv_ConfigMapKeyToEnvMap{ { ConfigMapKey: "foo", @@ -1195,11 +1193,8 @@ func Test_extendPodSpecPatch_ConfigMap(t *testing.T) { err := extendPodSpecPatch( context.Background(), tt.podSpec, - Options{KubernetesExecutorConfig: tt.k8sExecCfg}, - nil, - nil, - nil, - tt.inputParams, + common.Options{KubernetesExecutorConfig: tt.k8sExecCfg}, + mapToIOParameters(tt.inputParams), taskConfig, ) assert.Nil(t, err) @@ -1369,11 +1364,8 @@ func Test_extendPodSpecPatch_EmptyVolumeMount(t *testing.T) { err := extendPodSpecPatch( context.Background(), tt.podSpec, - Options{KubernetesExecutorConfig: tt.k8sExecCfg}, - nil, - nil, - nil, - map[string]*structpb.Value{}, + common.Options{KubernetesExecutorConfig: tt.k8sExecCfg}, + mapToIOParameters(map[string]*structpb.Value{}), taskConfig, ) assert.Nil(t, err) @@ -1417,8 +1409,8 @@ func Test_extendPodSpecPatch_ImagePullSecrets(t *testing.T) { "Valid - SecretA and SecretB", &kubernetesplatform.KubernetesExecutorConfig{ ImagePullSecret: []*kubernetesplatform.ImagePullSecret{ - {SecretName: "SecretA", SecretNameParameter: inputParamConstant("SecretA")}, - {SecretName: "SecretB", SecretNameParameter: inputParamConstant("SecretB")}, + {SecretName: "SecretA", SecretNameParameter: common.InputParamConstant("SecretA")}, + {SecretName: "SecretB", SecretNameParameter: common.InputParamConstant("SecretB")}, }, }, &k8score.PodSpec{ @@ -1464,8 +1456,8 @@ func Test_extendPodSpecPatch_ImagePullSecrets(t *testing.T) { "Valid - multiple input parameter secret names", &kubernetesplatform.KubernetesExecutorConfig{ ImagePullSecret: []*kubernetesplatform.ImagePullSecret{ - {SecretName: "not-used1", SecretNameParameter: inputParamComponent("param_1")}, - {SecretName: "not-used2", SecretNameParameter: inputParamComponent("param_2")}, + {SecretName: "not-used1", SecretNameParameter: common.InputParamComponent("param_1")}, + {SecretName: "not-used2", SecretNameParameter: common.InputParamComponent("param_2")}, }, }, &k8score.PodSpec{ @@ -1495,11 +1487,8 @@ func Test_extendPodSpecPatch_ImagePullSecrets(t *testing.T) { err := extendPodSpecPatch( context.Background(), got, - Options{KubernetesExecutorConfig: tt.k8sExecCfg}, - nil, - nil, - nil, - tt.inputParams, + common.Options{KubernetesExecutorConfig: tt.k8sExecCfg}, + mapToIOParameters(tt.inputParams), nil, ) assert.Nil(t, err) @@ -1619,7 +1608,7 @@ func Test_extendPodSpecPatch_Tolerations(t *testing.T) { &kubernetesplatform.KubernetesExecutorConfig{ Tolerations: []*kubernetesplatform.Toleration{ { - TolerationJson: inputParamComponent("param_1"), + TolerationJson: common.InputParamComponent("param_1"), }, }, }, @@ -1655,7 +1644,7 @@ func Test_extendPodSpecPatch_Tolerations(t *testing.T) { &kubernetesplatform.KubernetesExecutorConfig{ Tolerations: []*kubernetesplatform.Toleration{ { - TolerationJson: inputParamComponent("param_1"), + TolerationJson: 
common.InputParamComponent("param_1"), }, }, }, @@ -1677,7 +1666,7 @@ func Test_extendPodSpecPatch_Tolerations(t *testing.T) { &kubernetesplatform.KubernetesExecutorConfig{ Tolerations: []*kubernetesplatform.Toleration{ { - TolerationJson: inputParamComponent("param_1"), + TolerationJson: common.InputParamComponent("param_1"), }, { TolerationJson: structInputParamConstant(map[string]interface{}{ @@ -1745,7 +1734,7 @@ func Test_extendPodSpecPatch_Tolerations(t *testing.T) { &kubernetesplatform.KubernetesExecutorConfig{ Tolerations: []*kubernetesplatform.Toleration{ { - TolerationJson: inputParamComponent("param_1"), + TolerationJson: common.InputParamComponent("param_1"), }, }, }, @@ -1810,10 +1799,10 @@ func Test_extendPodSpecPatch_Tolerations(t *testing.T) { &kubernetesplatform.KubernetesExecutorConfig{ Tolerations: []*kubernetesplatform.Toleration{ { - TolerationJson: inputParamComponent("param_1"), + TolerationJson: common.InputParamComponent("param_1"), }, { - TolerationJson: inputParamComponent("param_2"), + TolerationJson: common.InputParamComponent("param_2"), }, { Key: "key5", @@ -1905,7 +1894,7 @@ func Test_extendPodSpecPatch_Tolerations(t *testing.T) { &kubernetesplatform.KubernetesExecutorConfig{ Tolerations: []*kubernetesplatform.Toleration{ { - TolerationJson: inputParamComponent("param_1"), + TolerationJson: common.InputParamComponent("param_1"), }, }, }, @@ -1934,11 +1923,8 @@ func Test_extendPodSpecPatch_Tolerations(t *testing.T) { err := extendPodSpecPatch( context.Background(), got, - Options{KubernetesExecutorConfig: tt.k8sExecCfg}, - nil, - nil, - nil, - tt.inputParams, + common.Options{KubernetesExecutorConfig: tt.k8sExecCfg}, + mapToIOParameters(tt.inputParams), taskConfig, ) assert.Nil(t, err) @@ -1987,7 +1973,7 @@ func Test_extendPodSpecPatch_FieldPathAsEnv(t *testing.T) { SecretAsEnv: []*kubernetesplatform.SecretAsEnv{ { SecretName: "my-secret", - SecretNameParameter: inputParamConstant("my-secret"), + SecretNameParameter: common.InputParamConstant("my-secret"), KeyToEnv: []*kubernetesplatform.SecretAsEnv_SecretKeyToEnvMap{ { SecretKey: "password", @@ -2041,11 +2027,8 @@ func Test_extendPodSpecPatch_FieldPathAsEnv(t *testing.T) { err := extendPodSpecPatch( context.Background(), got, - Options{KubernetesExecutorConfig: tt.k8sExecCfg}, - nil, - nil, - nil, - map[string]*structpb.Value{}, + common.Options{KubernetesExecutorConfig: tt.k8sExecCfg}, + mapToIOParameters(map[string]*structpb.Value{}), taskConfig, ) assert.Nil(t, err) @@ -2114,11 +2097,8 @@ func Test_extendPodSpecPatch_ActiveDeadlineSeconds(t *testing.T) { err := extendPodSpecPatch( context.Background(), got, - Options{KubernetesExecutorConfig: tt.k8sExecCfg}, - nil, - nil, - nil, - map[string]*structpb.Value{}, + common.Options{KubernetesExecutorConfig: tt.k8sExecCfg}, + mapToIOParameters(map[string]*structpb.Value{}), nil, ) assert.Nil(t, err) @@ -2204,11 +2184,8 @@ func Test_extendPodSpecPatch_ImagePullPolicy(t *testing.T) { err := extendPodSpecPatch( context.Background(), tt.podSpec, - Options{KubernetesExecutorConfig: tt.k8sExecCfg}, - nil, - nil, - nil, - map[string]*structpb.Value{}, + common.Options{KubernetesExecutorConfig: tt.k8sExecCfg}, + mapToIOParameters(map[string]*structpb.Value{}), nil, ) assert.Nil(t, err) @@ -2402,11 +2379,8 @@ func Test_extendPodSpecPatch_GenericEphemeralVolume(t *testing.T) { err := extendPodSpecPatch( context.Background(), tt.podSpec, - Options{KubernetesExecutorConfig: tt.k8sExecCfg}, - nil, - nil, - nil, - map[string]*structpb.Value{}, + 
common.Options{KubernetesExecutorConfig: tt.k8sExecCfg}, + mapToIOParameters(map[string]*structpb.Value{}), taskConfig, ) assert.Nil(t, err) @@ -2706,11 +2680,8 @@ func Test_extendPodSpecPatch_NodeAffinity(t *testing.T) { err := extendPodSpecPatch( context.Background(), got, - Options{KubernetesExecutorConfig: tt.k8sExecCfg}, - nil, - nil, - nil, - tt.inputParams, + common.Options{KubernetesExecutorConfig: tt.k8sExecCfg}, + mapToIOParameters(tt.inputParams), taskConfig, ) assert.NoError(t, err) @@ -2752,7 +2723,7 @@ func Test_extendPodSpecPatch_TaskConfig_CapturesAndApplies(t *testing.T) { }}, PvcMount: []*kubernetesplatform.PvcMount{{ MountPath: "/data", - PvcNameParameter: inputParamConstant("kubernetes-task-config-pvc"), + PvcNameParameter: common.InputParamConstant("kubernetes-task-config-pvc"), }}, SecretAsEnv: []*kubernetesplatform.SecretAsEnv{{ SecretName: "my-secret", @@ -2791,11 +2762,8 @@ func Test_extendPodSpecPatch_TaskConfig_CapturesAndApplies(t *testing.T) { err := extendPodSpecPatch( context.Background(), podSpec, - Options{KubernetesExecutorConfig: cfg, Component: comp}, - nil, - nil, - nil, - map[string]*structpb.Value{}, + common.Options{KubernetesExecutorConfig: cfg, Component: comp}, + mapToIOParameters(map[string]*structpb.Value{}), taskCfg, ) assert.NoError(t, err) @@ -2929,7 +2897,7 @@ func Test_extendPodSpecPatch_PvcMounts_Passthrough_NotAppliedToPod(t *testing.T) cfg := &kubernetesplatform.KubernetesExecutorConfig{ PvcMount: []*kubernetesplatform.PvcMount{{ MountPath: "/data", - PvcNameParameter: inputParamConstant("my-pvc"), + PvcNameParameter: common.InputParamConstant("my-pvc"), }}, } comp := &pipelinespec.ComponentSpec{ @@ -2942,11 +2910,8 @@ func Test_extendPodSpecPatch_PvcMounts_Passthrough_NotAppliedToPod(t *testing.T) err := extendPodSpecPatch( context.Background(), podSpec, - Options{KubernetesExecutorConfig: cfg, Component: comp}, - nil, - nil, - nil, - map[string]*structpb.Value{}, + common.Options{KubernetesExecutorConfig: cfg, Component: comp}, + mapToIOParameters(map[string]*structpb.Value{}), taskCfg, ) assert.NoError(t, err) @@ -2963,7 +2928,7 @@ func Test_extendPodSpecPatch_PvcMounts_Passthrough_AppliedToPod(t *testing.T) { cfg := &kubernetesplatform.KubernetesExecutorConfig{ PvcMount: []*kubernetesplatform.PvcMount{{ MountPath: "/data", - PvcNameParameter: inputParamConstant("my-pvc"), + PvcNameParameter: common.InputParamConstant("my-pvc"), }}, } comp := &pipelinespec.ComponentSpec{ @@ -2976,11 +2941,8 @@ func Test_extendPodSpecPatch_PvcMounts_Passthrough_AppliedToPod(t *testing.T) { err := extendPodSpecPatch( context.Background(), podSpec, - Options{KubernetesExecutorConfig: cfg, Component: comp}, - nil, - nil, - nil, - map[string]*structpb.Value{}, + common.Options{KubernetesExecutorConfig: cfg, Component: comp}, + mapToIOParameters(map[string]*structpb.Value{}), taskCfg, ) assert.NoError(t, err) diff --git a/backend/src/v2/driver/launcher_integration_example_test.go b/backend/src/v2/driver/launcher_integration_example_test.go new file mode 100644 index 00000000000..bedaa0c51a4 --- /dev/null +++ b/backend/src/v2/driver/launcher_integration_example_test.go @@ -0,0 +1,343 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package driver + +import ( + "context" + "testing" + + "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" + apiv2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +// TestExample_SingleTask demonstrates the basic pattern for testing +// driver and launcher together for a single task using Execute(). +func TestExample_SingleTask(t *testing.T) { + // Step 1: Create test context with root DAG already executed + tc := NewTestContextWithRootExecuted( + t, + &pipelinespec.PipelineJob_RuntimeConfig{}, + "test_data/taskOutputArtifact_test.yaml", + ) + + // Step 2: Run driver for a task + execution, _ := tc.RunContainerDriver( + "create-dataset", + tc.RootTask, + nil, // not in a loop + false, // don't auto-update scope yet + ) + + // Verify driver created proper ExecutorInput + require.NotNil(t, execution.ExecutorInput) + require.NotNil(t, execution.ExecutorInput.Outputs) + + // Step 3: Run launcher using Execute() which tests the full flow including: + // - Task output parameter updates via KFP API + // - Task status updates to SUCCEEDED + // - Status propagation up the DAG hierarchy + launcherExec := tc.RunLauncher(execution, map[string][]byte{ + "/tmp/kfp_outputs/output_metadata.json": []byte("{}"), + }, false) + + // Verify launcher executed successfully + require.Equal(t, 1, launcherExec.MockCmd.CallCount(), + "Launcher should have executed the component command once") + + // Verify task status was updated to SUCCEEDED + require.Equal(t, apiv2beta1.PipelineTaskDetail_SUCCEEDED, launcherExec.Task.State, + "Task should be marked as SUCCEEDED") + + // Clean up scope + _, ok := tc.Pop() + require.True(t, ok) +} + +// TestExample_SimpleArtifactPassing demonstrates the basic pattern for testing +// driver and launcher together with artifact passing between tasks. 
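+//
+// As a rough sketch, the flow exercised below is (RunContainerDriver,
+// RunLauncher, and Pop are this package's test-harness helpers; "files"
+// stands for the output-metadata file map passed in the real calls):
+//
+//	producer, _ := tc.RunContainerDriver("create-dataset", tc.RootTask, nil, false)
+//	pExec := tc.RunLauncher(producer, files, false) // uploads output_dataset
+//	tc.Pop()                                        // leave the producer scope
+//	consumer, _ := tc.RunContainerDriver("process-dataset", tc.RootTask, nil, false)
+//	cExec := tc.RunLauncher(consumer, files, false) // downloads input_dataset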
+func TestExample_SimpleArtifactPassing(t *testing.T) { + // Step 1: Create test context with root DAG already executed + tc := NewTestContextWithRootExecuted( + t, + &pipelinespec.PipelineJob_RuntimeConfig{}, + "test_data/taskOutputArtifact_test.yaml", + ) + + // Step 2: Run driver for producer task + // The driver will create a task and prepare ExecutorInput with output artifact specification + producerExecution, _ := tc.RunContainerDriver( + "create-dataset", + tc.RootTask, + nil, // not in a loop + false, // don't auto-update scope yet - we need it for RunLauncher + ) + + // Verify driver created proper ExecutorInput + require.NotNil(t, producerExecution.ExecutorInput) + require.NotNil(t, producerExecution.ExecutorInput.Outputs) + require.Contains(t, producerExecution.ExecutorInput.Outputs.Artifacts, "output_dataset") + + // Step 3: Run launcher to simulate actual component execution + // This uses the ExecutorInput prepared by the driver and simulates: + // - Downloading input artifacts + // - Executing the component command + // - Collecting output parameters + // - Uploading output artifacts + producerLauncherExec := tc.RunLauncher(producerExecution, map[string][]byte{ + // Provide the output metadata file that the component would write + "/tmp/kfp_outputs/output_metadata.json": []byte("{}"), + }, false) + + // Verify launcher executed successfully + require.Equal(t, 1, producerLauncherExec.MockCmd.CallCount(), + "Launcher should have executed the component command once") + + // Verify launcher created output artifact + require.Len(t, producerLauncherExec.Task.Outputs.Artifacts, 1, + "Task should have one output artifact") + + // Get the artifact ID for later verification + outputArtifactID := producerLauncherExec.Task.Outputs.Artifacts[0].Artifacts[0].ArtifactId + require.NotEmpty(t, outputArtifactID) + + // Verify launcher uploaded the artifact to object store + uploads := producerLauncherExec.MockObjStore.GetUploadCallsForKey("output_dataset") + require.Len(t, uploads, 1, "Launcher should have uploaded one artifact") + + // Now we're done with the producer task, pop its scope + _, ok := tc.Pop() + require.True(t, ok) + + // Step 4: Run driver for consumer task + // The driver should resolve the input artifact from the producer task + consumerExecution, _ := tc.RunContainerDriver( + "process-dataset", + tc.RootTask, + nil, + false, // don't auto-update scope yet + ) + + // Verify driver resolved input artifact from producer + require.Contains(t, consumerExecution.ExecutorInput.Inputs.Artifacts, "input_dataset") + inputArtifacts := consumerExecution.ExecutorInput.Inputs.Artifacts["input_dataset"].Artifacts + require.Len(t, inputArtifacts, 1, "Should have one input artifact") + require.Equal(t, outputArtifactID, inputArtifacts[0].ArtifactId, + "Input artifact should be the output artifact from producer") + + // Step 5: Run launcher for consumer task + consumerLauncherExec := tc.RunLauncher(consumerExecution, map[string][]byte{ + "/tmp/kfp_outputs/output_metadata.json": []byte("{}"), + }, false) + + // Verify launcher downloaded the input artifact + require.Len(t, consumerLauncherExec.MockObjStore.DownloadCalls, 1, + "Launcher should have downloaded one artifact") + assert.Equal(t, "input_dataset", consumerLauncherExec.MockObjStore.DownloadCalls[0].ArtifactKey, + "Downloaded artifact should be the input_dataset") + + // Verify launcher executed the consumer command + require.Equal(t, 1, consumerLauncherExec.MockCmd.CallCount(), + "Launcher should have executed the component command 
once") + + // Clean up scope + _, ok = tc.Pop() + require.True(t, ok) +} + +// TestExample_ParameterPassing demonstrates testing parameter passing between tasks. +func TestExample_ParameterPassing(t *testing.T) { + tc := NewTestContextWithRootExecuted( + t, + &pipelinespec.PipelineJob_RuntimeConfig{}, + "test_data/taskOutputParameter_test.yaml", + ) + + // Step 1: Run driver for producer task + producerExecution, _ := tc.RunContainerDriver("create-dataset", tc.RootTask, nil, false) + + // Verify driver prepared output parameter specification + require.Contains(t, producerExecution.ExecutorInput.Outputs.Parameters, "output_parameter_path") + + // Step 2: Run launcher and mock output parameter creation + // Get the dynamic output parameter file path from ExecutorInput + outputParamPath := producerExecution.ExecutorInput.Outputs.Parameters["output_parameter_path"].OutputFile + + producerLauncherExec := tc.RunLauncher(producerExecution, map[string][]byte{ + "/tmp/kfp_outputs/output_metadata.json": []byte("{}"), + // Simulate the component writing output parameter to file + outputParamPath: []byte("10.0"), + }, false) + + // Verify launcher executed + require.Equal(t, 1, producerLauncherExec.MockCmd.CallCount()) + + // Execute() now automatically collects output parameters from files and uploads to API. + // Verify the output parameter was created by the launcher + require.Len(t, producerLauncherExec.Task.Outputs.Parameters, 1) + outputParam := producerLauncherExec.Task.Outputs.Parameters[0] + require.Equal(t, "output_parameter_path", outputParam.ParameterKey) + // The parameter type is STRING in the component spec, so it's parsed as a string + require.Equal(t, "10.0", outputParam.Value.GetStringValue()) + + // Clean up producer scope + _, ok := tc.Pop() + require.True(t, ok) + + // Refresh the run to get the updated producer task outputs + tc.RefreshRun() + + // Step 3: Run driver for consumer task + consumerExecution, _ := tc.RunContainerDriver("process-dataset", tc.RootTask, nil, false) + + // Verify driver resolved input parameter from producer + require.Contains(t, consumerExecution.ExecutorInput.Inputs.ParameterValues, "input_dataset") + // The parameter value is passed as a string from the producer + inputValue := consumerExecution.ExecutorInput.Inputs.ParameterValues["input_dataset"] + require.NotNil(t, inputValue) + // It should be a string value "10.0" + require.Equal(t, "10.0", inputValue.GetStringValue()) + + // Step 4: Run launcher for consumer + // The consumer also has output parameters we need to provide + consumerOutputPath := consumerExecution.ExecutorInput.Outputs.Parameters["output_int"].OutputFile + consumerLauncherExec := tc.RunLauncher(consumerExecution, map[string][]byte{ + "/tmp/kfp_outputs/output_metadata.json": []byte("{}"), + consumerOutputPath: []byte("42"), + }, false) + + // Verify launcher executed with resolved parameter + require.Equal(t, 1, consumerLauncherExec.MockCmd.CallCount()) + + // Clean up consumer scope + _, ok = tc.Pop() + require.True(t, ok) +} + +// TestExample_LoopIteration demonstrates testing loop iterations with artifact collection. 
+func TestExample_LoopIteration(t *testing.T) { + tc := NewTestContextWithRootExecuted( + t, + &pipelinespec.PipelineJob_RuntimeConfig{}, + "test_data/loop_collected_raw_Iterator.yaml", + ) + + // Step 1: Enter the secondary pipeline + _, secondaryPipelineTask := tc.RunDagDriver("secondary-pipeline", tc.RootTask) + + // Step 2: Run the task that produces input for the loop + producerExecution, _ := tc.RunContainerDriver("create-dataset", secondaryPipelineTask, nil, false) + + // Run launcher for producer + producerLauncherExec := tc.RunLauncher(producerExecution, map[string][]byte{ + "/tmp/kfp_outputs/output_metadata.json": []byte("{}"), + }, false) + require.Equal(t, 1, producerLauncherExec.MockCmd.CallCount()) + + // Get producer's output artifact ID + producerArtifactID := producerLauncherExec.Task.Outputs.Artifacts[0].Artifacts[0].ArtifactId + + // Clean up producer scope + _, ok := tc.Pop() + require.True(t, ok) + + // Step 3: Run loop DAG driver + loopExecution, loopTask := tc.RunDagDriver("for-loop-2", secondaryPipelineTask) + + // Verify loop received input artifact from producer + require.Len(t, loopTask.Inputs.Artifacts, 1) + + // Step 4: Run loop iterations + for index := 0; index < 3; index++ { + // Run driver for this iteration + iterExecution, _ := tc.RunContainerDriver( + "process-dataset", + loopTask, + &[]int64{int64(index)}[0], // iteration index + false, // don't auto-update scope + ) + + // Verify driver resolved input artifact (same for all iterations) + require.Contains(t, iterExecution.ExecutorInput.Inputs.Artifacts, "input_dataset") + inputArtifact := iterExecution.ExecutorInput.Inputs.Artifacts["input_dataset"].Artifacts[0] + require.Equal(t, producerArtifactID, inputArtifact.ArtifactId) + + // Run launcher for iteration + iterLauncherExec := tc.RunLauncher(iterExecution, map[string][]byte{ + "/tmp/kfp_outputs/output_metadata.json": []byte("{}"), + }, false) + + // Verify launcher executed + require.Equal(t, 1, iterLauncherExec.MockCmd.CallCount()) + + // Clean up iteration scope + _, ok = tc.Pop() + require.True(t, ok) + } + + // Step 5: Exit loop DAG + tc.ExitDag() + + // Verify loop collected all iteration outputs + loopTask, err := tc.ClientManager.KFPAPIClient().GetTask( + context.Background(), + &apiv2beta1.GetTaskRequest{TaskId: loopExecution.TaskID}, + ) + require.NoError(t, err) + require.Len(t, loopTask.Outputs.Artifacts, 3, + "Loop should have collected outputs from 3 iterations") +} + +// TestExample_CustomCommandOutput demonstrates testing with custom command output verification. 
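+//
+// The launcher's mocks record their calls, so a test can assert on the exact
+// command, file reads, and object-store uploads. Roughly (field and helper
+// names as used in the function below):
+//
+//	cmdCall := launcherExec.MockCmd.RunCalls[0] // executed command + args
+//	uploads := launcherExec.MockObjStore.GetUploadCallsForKey("output_dataset")
+//	assert.Regexp(t, `^(minio|s3|gs)://`, uploads[0].RemoteURI)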
+func TestExample_CustomCommandOutput(t *testing.T) { + tc := NewTestContextWithRootExecuted( + t, + &pipelinespec.PipelineJob_RuntimeConfig{}, + "test_data/taskOutputArtifact_test.yaml", + ) + + // Run driver + execution, _ := tc.RunContainerDriver("create-dataset", tc.RootTask, nil, false) + + // Run launcher + launcherExec := tc.RunLauncher(execution, map[string][]byte{ + "/tmp/kfp_outputs/output_metadata.json": []byte("{}"), + }, false) + + // Verify the command that was executed + require.Equal(t, 1, launcherExec.MockCmd.CallCount()) + + // You can verify the exact command and args if needed + cmdCall := launcherExec.MockCmd.RunCalls[0] + assert.NotEmpty(t, cmdCall.Cmd, "Command should not be empty") + + // Verify file system operations + // The launcher reads the output metadata file if it exists + // In our case, we provided an empty JSON file + assert.GreaterOrEqual(t, len(launcherExec.MockFS.ReadFileCalls), 0, + "Launcher may have read files during execution") + + // Verify object store operations + // Check that the artifact was uploaded with correct URI + uploads := launcherExec.MockObjStore.GetUploadCallsForKey("output_dataset") + require.Len(t, uploads, 1) + // Artifact URI should use a valid object store scheme (minio://, s3://, gs://, etc.) + assert.Regexp(t, `^(minio|s3|gs)://`, uploads[0].RemoteURI, "Artifact URI should use a valid object store scheme") + + // Clean up scope + _, ok := tc.Pop() + require.True(t, ok) +} diff --git a/backend/src/v2/driver/resolve.go b/backend/src/v2/driver/resolve.go deleted file mode 100644 index d630a81a602..00000000000 --- a/backend/src/v2/driver/resolve.go +++ /dev/null @@ -1,1099 +0,0 @@ -// Copyright 2025 The Kubeflow Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package driver - -import ( - "context" - "encoding/json" - "errors" - "fmt" - "strings" - - "github.com/golang/glog" - "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" - "github.com/kubeflow/pipelines/backend/src/v2/component" - "github.com/kubeflow/pipelines/backend/src/v2/expression" - "github.com/kubeflow/pipelines/backend/src/v2/metadata" - "google.golang.org/genproto/googleapis/rpc/status" - "google.golang.org/protobuf/encoding/protojson" - "google.golang.org/protobuf/types/known/structpb" -) - -var ErrResolvedParameterNull = errors.New("the resolved input parameter is null") - -// resolveUpstreamOutputsConfig is just a config struct used to store the input -// parameters of the resolveUpstreamParameters and resolveUpstreamArtifacts -// functions. -type resolveUpstreamOutputsConfig struct { - ctx context.Context - paramSpec *pipelinespec.TaskInputsSpec_InputParameterSpec - artifactSpec *pipelinespec.TaskInputsSpec_InputArtifactSpec - dag *metadata.DAG - pipeline *metadata.Pipeline - mlmd *metadata.Client - err func(error) error -} - -// getDAGTasks is a recursive function that returns a map of all tasks across all DAGs in the context of nested DAGs. 
-func getDAGTasks( - ctx context.Context, - dag *metadata.DAG, - pipeline *metadata.Pipeline, - mlmd *metadata.Client, - flattenedTasks map[string]*metadata.Execution, -) (map[string]*metadata.Execution, error) { - if flattenedTasks == nil { - flattenedTasks = make(map[string]*metadata.Execution) - } - currentExecutionTasks, err := mlmd.GetExecutionsInDAG(ctx, dag, pipeline, true) - if err != nil { - return nil, err - } - for k, v := range currentExecutionTasks { - flattenedTasks[k] = v - } - for _, v := range currentExecutionTasks { - if v.GetExecution().GetType() == "system.DAGExecution" { - _, ok := v.GetExecution().GetCustomProperties()["iteration_count"] - if ok { - glog.V(4).Infof("Found a ParallelFor task, %v.", v.TaskName()) - } - glog.V(4).Infof("Found a task, %v, with an execution type of system.DAGExecution. Adding its tasks to the task list.", v.TaskName()) - subDAG, err := mlmd.GetDAG(ctx, v.GetExecution().GetId()) - if err != nil { - return nil, err - } - // Pass the subDAG into a recursive call to getDAGTasks and update - // tasks to include the subDAG's tasks. - flattenedTasks, err = getDAGTasks(ctx, subDAG, pipeline, mlmd, flattenedTasks) - if err != nil { - return nil, err - } - } - } - - return flattenedTasks, nil -} - -func resolveInputs( - ctx context.Context, - dag *metadata.DAG, - iterationIndex *int, - pipeline *metadata.Pipeline, - opts Options, - mlmd *metadata.Client, - expr *expression.Expr, -) (inputs *pipelinespec.ExecutorInput_Inputs, err error) { - defer func() { - if err != nil { - err = fmt.Errorf("failed to resolve inputs: %w", err) - } - }() - - task := opts.Task - inputsSpec := opts.Component.GetInputDefinitions() - - glog.V(4).Infof("dag: %v", dag) - glog.V(4).Infof("task: %v", task) - inputParams, _, err := dag.Execution.GetParameters() - if err != nil { - return nil, err - } - inputArtifacts, err := mlmd.GetInputArtifactsByExecutionID(ctx, dag.Execution.GetID()) - if err != nil { - return nil, err - } - glog.Infof("parent DAG input parameters: %+v, artifacts: %+v", inputParams, inputArtifacts) - inputs = &pipelinespec.ExecutorInput_Inputs{ - ParameterValues: make(map[string]*structpb.Value), - Artifacts: make(map[string]*pipelinespec.ArtifactList), - } - isIterationDriver := iterationIndex != nil - - handleParameterExpressionSelector := func() error { - for name, paramSpec := range task.GetInputs().GetParameters() { - var selector string - if selector = paramSpec.GetParameterExpressionSelector(); selector == "" { - continue - } - wrap := func(e error) error { - return fmt.Errorf("resolving parameter %q: evaluation of parameter expression selector %q failed: %w", name, selector, e) - } - value, ok := inputs.ParameterValues[name] - if !ok { - return wrap(fmt.Errorf("value not found in inputs")) - } - selected, err := expr.Select(value, selector) - if err != nil { - return wrap(err) - } - inputs.ParameterValues[name] = selected - } - return nil - } - // Track parameters set to nil by the driver (for the case in which optional pipeline input parameters are not - // included, and default value is nil). - parametersSetNilByDriver := map[string]bool{} - handleParamTypeValidationAndConversion := func() error { - // TODO(Bobgy): verify whether there are inputs not in the inputs spec. 
- for name, spec := range inputsSpec.GetParameters() { - if task.GetParameterIterator() != nil { - if !isIterationDriver && task.GetParameterIterator().GetItemInput() == name { - // It's expected that an iterator does not have iteration item input parameter, - // because only iterations get the item input parameter. - continue - } - if isIterationDriver && task.GetParameterIterator().GetItems().GetInputParameter() == name { - // It's expected that an iteration does not have iteration items input parameter, - // because only the iterator has it. - continue - } - } - value, hasValue := inputs.GetParameterValues()[name] - - // Handle when parameter does not have input value - if !hasValue && !inputsSpec.GetParameters()[name].GetIsOptional() { - // When parameter is not optional and there is no input value, first check if there is a default value, - // if there is a default value, use it as the value of the parameter. - // if there is no default value, report error. - if inputsSpec.GetParameters()[name].GetDefaultValue() == nil { - return fmt.Errorf("neither value nor default value provided for non-optional parameter %q", name) - } - } else if !hasValue && inputsSpec.GetParameters()[name].GetIsOptional() { - // When parameter is optional and there is no input value, value comes from default value. - // But we don't pass the default value here. They are resolved internally within the component. - // Note: in the past the backend passed the default values into the component. This is a behavior change. - // See discussion: https://github.com/kubeflow/pipelines/pull/8765#discussion_r1119477085 - continue - } - - switch spec.GetParameterType() { - case pipelinespec.ParameterType_STRING: - _, isValueString := value.GetKind().(*structpb.Value_StringValue) - if !isValueString { - // If parameter was set to nil by driver, allow input parameter to have a nil value. - if parametersSetNilByDriver[name] { - continue - } - // TODO(Bobgy): discuss whether we want to allow auto type conversion - // all parameter types can be consumed as JSON string - text, err := metadata.PbValueToText(value) - if err != nil { - return fmt.Errorf("converting input parameter %q to string: %w", name, err) - } - inputs.GetParameterValues()[name] = structpb.NewStringValue(text) - } - default: - typeMismatch := func(actual string) error { - return fmt.Errorf("input parameter %q type mismatch: expect %s, got %s", name, spec.GetParameterType(), actual) - } - switch v := value.GetKind().(type) { - case *structpb.Value_NullValue: - // If parameter was set to nil by driver, allow input parameter to have a nil value. - if parametersSetNilByDriver[name] { - continue - } - return fmt.Errorf("got null for input parameter %q", name) - case *structpb.Value_StringValue: - // TODO(Bobgy): consider whether we support parsing string as JSON for any other types. 
- if spec.GetParameterType() != pipelinespec.ParameterType_STRING { - return typeMismatch("string") - } - case *structpb.Value_NumberValue: - if spec.GetParameterType() != pipelinespec.ParameterType_NUMBER_DOUBLE && spec.GetParameterType() != pipelinespec.ParameterType_NUMBER_INTEGER { - return typeMismatch("number") - } - case *structpb.Value_BoolValue: - if spec.GetParameterType() != pipelinespec.ParameterType_BOOLEAN { - return typeMismatch("bool") - } - case *structpb.Value_ListValue: - if spec.GetParameterType() != pipelinespec.ParameterType_LIST { - return typeMismatch("list") - } - case *structpb.Value_StructValue: - if (spec.GetParameterType() != pipelinespec.ParameterType_STRUCT) && (spec.GetParameterType() != pipelinespec.ParameterType_TASK_FINAL_STATUS) && (spec.GetParameterType() != pipelinespec.ParameterType_TASK_CONFIG) { - return typeMismatch("struct") - } - default: - return fmt.Errorf("parameter %s has unknown protobuf.Value type: %T", name, v) - } - } - } - return nil - } - // this function has many branches, so it's hard to add more postprocess steps - // TODO(Bobgy): consider splitting this function into several sub functions - defer func() { - if err == nil { - err = handleParameterExpressionSelector() - } - if err == nil { - err = handleParamTypeValidationAndConversion() - } - }() - // resolve input parameters - if isIterationDriver { - // resolve inputs for iteration driver is very different - artifacts, err := mlmd.GetInputArtifactsByExecutionID(ctx, dag.Execution.GetID()) - if err != nil { - return nil, err - } - inputs.ParameterValues = inputParams - inputs.Artifacts = artifacts - switch { - case task.GetArtifactIterator() != nil: - return nil, fmt.Errorf("artifact iterator not implemented yet") - case task.GetParameterIterator() != nil: - var itemsInput string - if task.GetParameterIterator().GetItems().GetInputParameter() != "" { - // input comes from outside the component - itemsInput = task.GetParameterIterator().GetItems().GetInputParameter() - } else if task.GetParameterIterator().GetItemInput() != "" { - // input comes from static input - itemsInput = task.GetParameterIterator().GetItemInput() - } else { - return nil, fmt.Errorf("cannot retrieve parameter iterator") - } - items, err := getItems(inputs.ParameterValues[itemsInput]) - if err != nil { - return nil, err - } - if *iterationIndex >= len(items) { - return nil, fmt.Errorf("bug: %v items found, but getting index %v", len(items), *iterationIndex) - } - delete(inputs.ParameterValues, itemsInput) - inputs.ParameterValues[task.GetParameterIterator().GetItemInput()] = items[*iterationIndex] - default: - return nil, fmt.Errorf("bug: iteration_index>=0, but task iterator is empty") - } - return inputs, nil - } - // A DAG driver (not Root DAG driver) indicates this is likely the start of a nested pipeline. - // Handle omitted optional pipeline input parameters similar to how they are handled on the root pipeline. - isDagDriver := opts.DriverType == "DAG" - if isDagDriver { - for name, paramSpec := range opts.Component.GetInputDefinitions().GetParameters() { - _, ok := task.Inputs.GetParameters()[name] - if !ok && paramSpec.IsOptional { - if paramSpec.GetDefaultValue() != nil { - // If no value was input, pass along the default value to the component. - inputs.ParameterValues[name] = paramSpec.GetDefaultValue() - } else { - // If no default value is set, pass along the null value to the component. - // This is analogous to a pipeline run being submitted without optional pipeline input parameters. 
- inputs.ParameterValues[name] = structpb.NewNullValue() - parametersSetNilByDriver[name] = true - } - } - } - } - // Handle parameters. - for name, paramSpec := range task.GetInputs().GetParameters() { - if compParam := opts.Component.GetInputDefinitions().GetParameters()[name]; compParam != nil { - // Skip resolving dsl.TaskConfig because that information is only available after initPodSpecPatch and - // extendPodSpecPatch are called. - if compParam.GetParameterType() == pipelinespec.ParameterType_TASK_CONFIG { - continue - } - } - - v, err := resolveInputParameter(ctx, dag, pipeline, opts, mlmd, paramSpec, inputParams) - if err != nil { - if !errors.Is(err, ErrResolvedParameterNull) { - return nil, err - } - - componentParam, ok := opts.Component.GetInputDefinitions().GetParameters()[name] - if ok && componentParam != nil && componentParam.IsOptional { - // If the resolved parameter was null and the component input parameter is optional, just skip setting - // it and the launcher will handle defaults. - continue - } - - return nil, err - } - - inputs.ParameterValues[name] = v - } - - // Handle artifacts. - for name, artifactSpec := range task.GetInputs().GetArtifacts() { - v, err := resolveInputArtifact(ctx, dag, pipeline, mlmd, name, artifactSpec, inputArtifacts, task) - if err != nil { - return nil, err - } - inputs.Artifacts[name] = v - } - // TODO(Bobgy): validate executor inputs match component inputs definition - return inputs, nil -} - -// resolveInputParameter resolves an InputParameterSpec -// using a given input context via InputParams. ErrResolvedParameterNull is returned if paramSpec -// is a component input parameter and parameter resolves to a null value (i.e. an optional pipeline input with no -// default). The caller can decide if this is allowed in that context. -func resolveInputParameter( - ctx context.Context, - dag *metadata.DAG, - pipeline *metadata.Pipeline, - opts Options, - mlmd *metadata.Client, - paramSpec *pipelinespec.TaskInputsSpec_InputParameterSpec, - inputParams map[string]*structpb.Value, -) (*structpb.Value, error) { - glog.V(4).Infof("paramSpec: %v", paramSpec) - paramError := func(err error) error { - return fmt.Errorf("resolving input parameter with spec %s: %w", paramSpec, err) - } - switch t := paramSpec.Kind.(type) { - case *pipelinespec.TaskInputsSpec_InputParameterSpec_ComponentInputParameter: - componentInput := paramSpec.GetComponentInputParameter() - if componentInput == "" { - return nil, paramError(fmt.Errorf("empty component input")) - } - v, ok := inputParams[componentInput] - if !ok { - return nil, paramError(fmt.Errorf("parent DAG does not have input parameter %s", componentInput)) - } - - if _, isNullValue := v.GetKind().(*structpb.Value_NullValue); isNullValue { - // Null values are only allowed for optional pipeline input parameters with no values. The caller has this - // context to know if this is allowed. - return nil, fmt.Errorf("%w: %s", ErrResolvedParameterNull, componentInput) - } - - return v, nil - - // This is the case where the input comes from the output of an upstream task. 
- case *pipelinespec.TaskInputsSpec_InputParameterSpec_TaskOutputParameter: - cfg := resolveUpstreamOutputsConfig{ - ctx: ctx, - paramSpec: paramSpec, - dag: dag, - pipeline: pipeline, - mlmd: mlmd, - err: paramError, - } - v, err := resolveUpstreamParameters(cfg) - if err != nil { - return nil, err - } - return v, nil - case *pipelinespec.TaskInputsSpec_InputParameterSpec_RuntimeValue: - runtimeValue := paramSpec.GetRuntimeValue() - switch t := runtimeValue.Value.(type) { - case *pipelinespec.ValueOrRuntimeParameter_Constant: - val := runtimeValue.GetConstant() - valStr := val.GetStringValue() - var v *structpb.Value - - if strings.Contains(valStr, "{{$.workspace_path}}") { - v = structpb.NewStringValue(strings.ReplaceAll(valStr, "{{$.workspace_path}}", component.WorkspaceMountPath)) - return v, nil - } - - switch valStr { - case "{{$.pipeline_job_name}}": - v = structpb.NewStringValue(opts.RunDisplayName) - case "{{$.pipeline_job_resource_name}}": - v = structpb.NewStringValue(opts.RunName) - case "{{$.pipeline_job_uuid}}": - v = structpb.NewStringValue(opts.RunID) - case "{{$.pipeline_task_name}}": - v = structpb.NewStringValue(opts.TaskName) - case "{{$.pipeline_task_uuid}}": - v = structpb.NewStringValue(fmt.Sprintf("%d", opts.DAGExecutionID)) - default: - v = val - } - - return v, nil - default: - return nil, paramError(fmt.Errorf("param runtime value spec of type %T not implemented", t)) - } - case *pipelinespec.TaskInputsSpec_InputParameterSpec_TaskFinalStatus_: - tasks, err := getDAGTasks(ctx, dag, pipeline, mlmd, nil) - if err != nil { - return nil, err - } - - if len(opts.Task.DependentTasks) < 1 { - return nil, fmt.Errorf("task %v has no dependent tasks", opts.Task.TaskInfo.GetName()) - } - producer, ok := tasks[metadata.GetTaskNameWithDagID(opts.Task.DependentTasks[0], dag.Execution.GetID())] - if !ok { - return nil, fmt.Errorf("producer task, %v, not in tasks", producer.TaskName()) - } - finalStatus := pipelinespec.PipelineTaskFinalStatus{ - State: producer.GetExecution().GetLastKnownState().String(), - PipelineTaskName: producer.TaskName(), - PipelineJobResourceName: opts.RunName, - // TODO: Implement fields "Message and "Code" below for Error status. - Error: &status.Status{}, - } - finalStatusJSON, err := protojson.Marshal(&finalStatus) - if err != nil { - return nil, fmt.Errorf("failed to marshal PipelineTaskFinalStatus: %w", err) - } - - var finalStatusMap map[string]interface{} - if err := json.Unmarshal(finalStatusJSON, &finalStatusMap); err != nil { - return nil, fmt.Errorf("failed to unmarshal JSON of PipelineTaskFinalStatus: %w", err) - } - - finalStatusStruct, err := structpb.NewStruct(finalStatusMap) - if err != nil { - return nil, fmt.Errorf("failed to create structpb.Struct: %w", err) - } - - return structpb.NewStructValue(finalStatusStruct), nil - default: - return nil, paramError(fmt.Errorf("parameter spec of type %T not implemented yet", t)) - } -} - -// resolveInputParameterStr is like resolveInputParameter but returns an error if the resolved value is not a non-empty -// string. 
-func resolveInputParameterStr( - ctx context.Context, - dag *metadata.DAG, - pipeline *metadata.Pipeline, - opts Options, - mlmd *metadata.Client, - paramSpec *pipelinespec.TaskInputsSpec_InputParameterSpec, - inputParams map[string]*structpb.Value, -) (*structpb.Value, error) { - val, err := resolveInputParameter(ctx, dag, pipeline, opts, mlmd, paramSpec, inputParams) - if err != nil { - return nil, err - } - - if typedVal, ok := val.GetKind().(*structpb.Value_StringValue); ok && typedVal != nil { - if typedVal.StringValue == "" { - return nil, fmt.Errorf("resolving input parameter with spec %s. Expected a non-empty string", paramSpec) - } - } else { - return nil, fmt.Errorf("resolving input parameter with spec %s. Expected a string but got: %T", paramSpec, val.GetKind()) - } - - return val, nil -} - -// resolveInputArtifact resolves an InputArtifactSpec -// using a given input context via inputArtifacts. -func resolveInputArtifact( - ctx context.Context, - dag *metadata.DAG, - pipeline *metadata.Pipeline, - mlmd *metadata.Client, - name string, - artifactSpec *pipelinespec.TaskInputsSpec_InputArtifactSpec, - inputArtifacts map[string]*pipelinespec.ArtifactList, - task *pipelinespec.PipelineTaskSpec, -) (*pipelinespec.ArtifactList, error) { - glog.V(4).Infof("inputs: %#v", task.GetInputs()) - glog.V(4).Infof("artifacts: %#v", task.GetInputs().GetArtifacts()) - artifactError := func(err error) error { - return fmt.Errorf("failed to resolve input artifact %s with spec %s: %w", name, artifactSpec, err) - } - switch t := artifactSpec.Kind.(type) { - case *pipelinespec.TaskInputsSpec_InputArtifactSpec_ComponentInputArtifact: - inputArtifactName := artifactSpec.GetComponentInputArtifact() - if inputArtifactName == "" { - return nil, artifactError(fmt.Errorf("component input artifact key is empty")) - } - v, ok := inputArtifacts[inputArtifactName] - if !ok { - return nil, artifactError(fmt.Errorf("parent DAG does not have input artifact %s", inputArtifactName)) - } - return v, nil - case *pipelinespec.TaskInputsSpec_InputArtifactSpec_TaskOutputArtifact: - cfg := resolveUpstreamOutputsConfig{ - ctx: ctx, - artifactSpec: artifactSpec, - dag: dag, - pipeline: pipeline, - mlmd: mlmd, - err: artifactError, - } - artifacts, err := resolveUpstreamArtifacts(cfg) - if err != nil { - return nil, err - } - return artifacts, nil - default: - return nil, artifactError(fmt.Errorf("artifact spec of type %T not implemented yet", t)) - } -} - -// resolveUpstreamParameters resolves input parameters that come from upstream -// tasks. These tasks can be components/containers, which is relatively -// straightforward, or DAGs, in which case, we need to traverse the graph until -// we arrive at a component/container (since there can be n nested DAGs). 
-func resolveUpstreamParameters(cfg resolveUpstreamOutputsConfig) (*structpb.Value, error) { - taskOutput := cfg.paramSpec.GetTaskOutputParameter() - glog.V(4).Info("taskOutput: ", taskOutput) - producerTaskName := taskOutput.GetProducerTask() - if producerTaskName == "" { - return nil, cfg.err(fmt.Errorf("producerTaskName is empty")) - } - outputParameterKey := taskOutput.GetOutputParameterKey() - if outputParameterKey == "" { - return nil, cfg.err(fmt.Errorf("output parameter key is empty")) - } - - producerTaskName = metadata.GetTaskNameWithDagID(producerTaskName, cfg.dag.Execution.GetID()) - // For the scenario where 2 tasks are defined within a ParallelFor and 1 - // receives the output of the other we must ensure that the downstream task - // resolves the approriate output of the iteration it is in. With knowing if - // we are resolving inputs for a task within a ParallelFor DAG we can add - // the iteration index to the producerTaskName so that we can resolve the - // correct iteration of that task. - producerTaskName = InferIndexedTaskName(producerTaskName, cfg.dag.Execution) - // Get a list of tasks for the current DAG first. The reason we use - // getDAGTasks instead of mlmd.GetExecutionsInDAG without the dag filter is - // because the latter does not handle task name collisions in the map which - // results in a bunch of unhandled edge cases and test failures. - glog.V(4).Infof("producerTaskName: %v", producerTaskName) - glog.V(4).Infof("outputParameterKey: %v", outputParameterKey) - tasks, err := getDAGTasks(cfg.ctx, cfg.dag, cfg.pipeline, cfg.mlmd, nil) - if err != nil { - return nil, cfg.err(err) - } - - producer, ok := tasks[producerTaskName] - if !ok { - return nil, cfg.err(fmt.Errorf("producer task, %v, not in tasks", producerTaskName)) - } - glog.V(4).Info("producer: ", producer) - glog.V(4).Infof("tasks: %#v", tasks) - currentTask := producer - subTaskName := producerTaskName - // Continue looping until we reach a sub-task that is NOT a DAG. - for { - glog.V(4).Info("currentTask: ", currentTask.TaskName()) - // If the current task is a DAG: - if *currentTask.GetExecution().Type == "system.DAGExecution" { - // Since currentTask is a DAG, we need to deserialize its - // output parameter map so that we can look up its - // corresponding producer sub-task, reassign currentTask, - // and iterate through this loop again. 
- outputParametersCustomProperty, ok := currentTask.GetExecution().GetCustomProperties()["parameter_producer_task"] - if !ok { - return nil, cfg.err(fmt.Errorf("task, %v, does not have a parameter_producer_task custom property", currentTask.TaskName())) - } - glog.V(4).Infof("outputParametersCustomProperty: %#v", outputParametersCustomProperty) - - dagOutputParametersMap := make(map[string]*pipelinespec.DagOutputsSpec_DagOutputParameterSpec) - glog.V(4).Infof("outputParametersCustomProperty: %v", outputParametersCustomProperty.GetStructValue()) - - for name, value := range outputParametersCustomProperty.GetStructValue().GetFields() { - outputSpec := &pipelinespec.DagOutputsSpec_DagOutputParameterSpec{} - err := protojson.Unmarshal([]byte(value.GetStringValue()), outputSpec) - if err != nil { - return nil, err - } - dagOutputParametersMap[name] = outputSpec - } - - glog.V(4).Infof("Deserialized dagOutputParametersMap: %v", dagOutputParametersMap) - - // For this section, if the currentTask we are looking for is within - // a ParallelFor DAG, this means the actual task that produced the - // output we need has multiple iterations so we have to gather all - // them and fan them in by collecting them into a list i.e. - // kfp.dsl.Collected support. - parentDAG, err := cfg.mlmd.GetExecution(cfg.ctx, currentTask.GetExecution().GetCustomProperties()["parent_dag_id"].GetIntValue()) - if err != nil { - return nil, cfg.err(err) - } - iterations := getParallelForIterationCount(currentTask, parentDAG) - if iterations > 0 { - parameterList, _, err := CollectInputs(cfg, subTaskName, tasks, outputParameterKey, false) - if err != nil { - return nil, cfg.err(err) - } - return parameterList, nil - } - // Support for the 2 DagOutputParameterSpec types: - // ValueFromParameter & ValueFromOneof - subTaskName, outputParameterKey, err = GetProducerTask(currentTask, tasks, subTaskName, outputParameterKey, false) - if err != nil { - return nil, cfg.err(err) - } - glog.V(4).Infof("SubTaskName from outputParams: %v", subTaskName) - glog.V(4).Infof("OutputParameterKey from outputParams: %v", outputParameterKey) - if subTaskName == "" { - return nil, cfg.err(fmt.Errorf("producer_subtask not in outputParams")) - } - - // If the sub-task is a DAG, reassign currentTask and run - glog.V(4).Infof( - "Overriding currentTask, %v, output with currentTask's producer_subtask, %v, output.", - currentTask.TaskName(), - subTaskName, - ) - currentTask, ok = tasks[subTaskName] - if !ok { - return nil, cfg.err(fmt.Errorf("subTaskName, %v, not in tasks", subTaskName)) - } - - } else { - _, outputParametersCustomProperty, err := currentTask.GetParameters() - if err != nil { - return nil, err - } - // Base case - return outputParametersCustomProperty[outputParameterKey], nil - } - } -} - -// resolveUpstreamArtifacts resolves input artifacts that come from upstream -// tasks. These tasks can be components/containers, which is relatively -// straightforward, or DAGs, in which case, we need to traverse the graph until -// we arrive at a component/container (since there can be n nested DAGs). 
-func resolveUpstreamArtifacts(cfg resolveUpstreamOutputsConfig) (*pipelinespec.ArtifactList, error) { - glog.V(4).Infof("artifactSpec: %#v", cfg.artifactSpec) - taskOutput := cfg.artifactSpec.GetTaskOutputArtifact() - glog.V(4).Info("taskOutput: ", taskOutput) - producerTaskName := taskOutput.GetProducerTask() - if taskOutput.GetProducerTask() == "" { - return nil, cfg.err(fmt.Errorf("producer task is empty")) - } - if taskOutput.GetOutputArtifactKey() == "" { - cfg.err(fmt.Errorf("output artifact key is empty")) - } - producerTaskName = metadata.GetTaskNameWithDagID(producerTaskName, cfg.dag.Execution.GetID()) - // The main difference between the root ParallelFor DAG and its iteration - // DAGs is that the root contains the custom property "iteration_count" - // while the iterations contain "iteration_index". We can use this to - // determine if we are in a ParallelFor DAG or not. The iteration DAGs will - // contain the "iteration_index" which is used to resolve the correct output - // artifact for the downstream task within the iteration. ParallelFor - // iterations are DAGs themselves, we can verify if we are in a iteration by - // confirming that the "iteration_index" exists for the DAG of the current - // task we are attempting to resolve. If the dag contains the - // "iteration_index", the producerTaskName will be updated appropriately - producerTaskName = InferIndexedTaskName(producerTaskName, cfg.dag.Execution) - glog.V(4).Infof("producerTaskName: %v", producerTaskName) - tasks, err := getDAGTasks(cfg.ctx, cfg.dag, cfg.pipeline, cfg.mlmd, nil) - if err != nil { - return nil, cfg.err(err) - } - - producer, ok := tasks[producerTaskName] - if !ok { - return nil, cfg.err( - fmt.Errorf("cannot find producer task %q", producerTaskName), - ) - } - glog.V(4).Info("producer: ", producer) - glog.V(4).Infof("tasks: %#v", tasks) - currentTask := producer - outputArtifactKey := taskOutput.GetOutputArtifactKey() - subTaskName := producerTaskName - // Continue looping until we reach a sub-task that is either a ParallelFor - // task or a Container task. - for { - glog.V(4).Info("currentTask: ", currentTask.TaskName()) - // If the current task is a DAG: - if *currentTask.GetExecution().Type == "system.DAGExecution" { - // Get the sub-task. - parentDAG, err := cfg.mlmd.GetExecution(cfg.ctx, currentTask.GetExecution().GetCustomProperties()["parent_dag_id"].GetIntValue()) - if err != nil { - return nil, cfg.err(err) - } - iterations := getParallelForIterationCount(currentTask, parentDAG) - if iterations > 0 { - _, artifactList, err := CollectInputs(cfg, subTaskName, tasks, outputArtifactKey, true) - if err != nil { - return nil, cfg.err(err) - } - return artifactList, nil - } - subTaskName, outputArtifactKey, err = GetProducerTask(currentTask, tasks, subTaskName, outputArtifactKey, true) - if err != nil { - return nil, cfg.err(err) - } - glog.V(4).Infof("ProducerSubtask: %v", subTaskName) - glog.V(4).Infof("OutputArtifactKey: %v", outputArtifactKey) - // If the sub-task is a DAG, reassign currentTask and run - glog.V(4).Infof("currentTask ID: %v", currentTask.GetID()) - glog.V(4).Infof( - "Overriding currentTask, %v, output with currentTask's producer_subtask, %v, output.", - currentTask.TaskName(), - subTaskName, - ) - currentTask, ok = tasks[subTaskName] - if !ok { - return nil, cfg.err(fmt.Errorf("subTaskName, %v, not in tasks", subTaskName)) - } - } else { - // Base case, currentTask is a container, not a DAG. 
- outputs, err := cfg.mlmd.GetOutputArtifactsByExecutionId(cfg.ctx, currentTask.GetID()) - if err != nil { - return nil, cfg.err(err) - } - glog.V(4).Infof("outputs: %#v", outputs) - artifact, ok := outputs[outputArtifactKey] - if !ok { - cfg.err( - fmt.Errorf( - "cannot find output artifact key %q in producer task %q", - taskOutput.GetOutputArtifactKey(), - taskOutput.GetProducerTask(), - ), - ) - } - runtimeArtifact, err := artifact.ToRuntimeArtifact() - if err != nil { - cfg.err(err) - } - // Base case - return &pipelinespec.ArtifactList{ - Artifacts: []*pipelinespec.RuntimeArtifact{runtimeArtifact}, - }, nil - } - } -} - -// resolvePodSpecInputRuntimeParameter resolves runtime value that is intended to be -// utilized within the Pod Spec. parameterValue takes the form of: -// "{{$.inputs.parameters['pipelinechannel--someParameterName']}}" -// -// parameterValue is a runtime parameter value that has been resolved and included within -// the executor input. Since the pod spec patch cannot dynamically update the underlying -// container template's inputs in an Argo Workflow, this is a workaround for resolving -// such parameters. -// -// If parameter value is not a parameter channel, then a constant value is assumed and -// returned as is. -func resolvePodSpecInputRuntimeParameter(parameterValue string, executorInput *pipelinespec.ExecutorInput) (string, error) { - if isInputParameterChannel(parameterValue) { - inputImage, err := extractInputParameterFromChannel(parameterValue) - if err != nil { - return "", err - } - if val, ok := executorInput.Inputs.ParameterValues[inputImage]; ok { - return val.GetStringValue(), nil - } else { - return "", fmt.Errorf("executorInput did not contain container Image input parameter") - } - } - return parameterValue, nil -} - -// resolveK8sJsonParameter resolves a k8s JSON and unmarshal it -// to the provided k8s resource. -// -// Parameters: -// - pipelineInputParamSpec: An input parameter spec that resolve to a valid JSON -// - inputParams: InputParams that contain resolution context for pipelineInputParamSpec -// - res: The k8s resource to unmarshal the json to -func resolveK8sJsonParameter[k8sResource any]( - ctx context.Context, - opts Options, - dag *metadata.DAG, - pipeline *metadata.Pipeline, - mlmd *metadata.Client, - pipelineInputParamSpec *pipelinespec.TaskInputsSpec_InputParameterSpec, - inputParams map[string]*structpb.Value, - res *k8sResource, -) error { - resolvedParam, err := resolveInputParameter(ctx, dag, pipeline, opts, mlmd, - pipelineInputParamSpec, inputParams) - if err != nil { - return fmt.Errorf("failed to resolve k8s parameter: %w", err) - } - paramJSON, err := resolvedParam.GetStructValue().MarshalJSON() - if err != nil { - return err - } - err = json.Unmarshal(paramJSON, &res) - if err != nil { - return fmt.Errorf("failed to unmarshal k8s Resource json "+ - "ensure that k8s Resource json correctly adheres to its respective k8s spec: %w", err) - } - return nil -} - -// CollectInputs performs artifact/parameter collection across a DAG/tree -// using a breadth first search traversal. -func CollectInputs( - cfg resolveUpstreamOutputsConfig, - parallelForDAGTaskName string, - tasks map[string]*metadata.Execution, - outputKey string, - isArtifact bool, -) (outputParameterList *structpb.Value, outputArtifactList *pipelinespec.ArtifactList, err error) { - glog.V(4).Infof("currentTask is a ParallelFor DAG. 
Attempting to gather all nested producer_subtasks") - // Set some helpers for the start and looping for BFS - var currentTask *metadata.Execution - var workingSubTaskName string - workingOutputKey := outputKey - previousWorkingOutputKey := outputKey - // Instantiate the lists values that will hold all values pulled from the - // tasks of each iteration. - parallelForParameterList := make([]*structpb.Value, 0) - parallelForArtifactList := make([]*pipelinespec.RuntimeArtifact, 0) - tasksToResolve := make([]string, 0) - // Set up the queue for BFS by setting the parallelFor DAG task as the - // initial node. The loop will add the iteration dag task names for us into - // the slice/queue. - tasksToResolve = append(tasksToResolve, parallelForDAGTaskName) - previousTaskName := tasks[tasksToResolve[0]].TaskName() - - for len(tasksToResolve) > 0 { - // The starterQueue contains the first set of child DAGs from the - // parallelFor, i.e. the iteration dags. - glog.V(4).Infof("tasksToResolve: %v", tasksToResolve) - currentTaskName := tasksToResolve[0] - tasksToResolve = tasksToResolve[1:] - - currentTask = tasks[currentTaskName] - - // We check if these values need to be updated going through the - // resolution of dags/tasks Most commonly the subTaskName will change - // for both parameter & artifact resolution. For parameter resolutions, - // the outputParameterKey can change, and is used for extracting the - // appropriate field off of the struct set for the outputs on the task - // in question. - - // An issue arises if we update the outputParameterKey but there exists - // multiple iterations of the same task and we haven't fully parsed all - // iterations. We will encounter a scenario where we will attempt to - // extract fields from the struct with the wrong key. Hence, the - // condition below. NOTE: This is only an issue for Parameter resolution - // and does not interfere with Artifact resolution. - if currentTask.TaskName() == previousTaskName { - workingOutputKey = previousWorkingOutputKey - } - - previousTaskName = currentTask.TaskName() - previousWorkingOutputKey = workingOutputKey - workingSubTaskName, workingOutputKey, _ = GetProducerTask(currentTask, tasks, workingSubTaskName, workingOutputKey, isArtifact) - - glog.V(4).Infof("currentTask ID: %v", currentTask.GetID()) - glog.V(4).Infof("currentTask Name: %v", currentTask.TaskName()) - glog.V(4).Infof("currentTask Type: %v", currentTask.GetExecution().GetType()) - glog.V(4).Infof("workingSubTaskName %v", workingSubTaskName) - glog.V(4).Infof("workingOutputKey: %v", workingOutputKey) - - iterations := currentTask.GetExecution().GetCustomProperties()["iteration_count"] - iterationIndex := currentTask.GetExecution().GetCustomProperties()["iteration_index"] - - // Base cases for handling the task that actually maps to the task that - // created the artifact/parameter we are searching for. - - // Base case 1: currentTask is a ContainerExecution that we can load - // the values off of. 
- if *currentTask.GetExecution().Type == "system.ContainerExecution" { - glog.V(4).Infof("currentTask, %v, is a ContainerExecution", currentTaskName) - paramValue, artifact, err := collectContainerOutput(cfg, currentTask, workingOutputKey, isArtifact) - if err != nil { - return nil, nil, err - } - if isArtifact { - parallelForArtifactList = append(parallelForArtifactList, artifact) - glog.V(4).Infof("parallelForArtifactList: %v", parallelForArtifactList) - } else { - parallelForParameterList = append(parallelForParameterList, paramValue) - glog.V(4).Infof("parallelForParameterList: %v", parallelForParameterList) - } - continue - } - // Base case 2: currentTask is a DAGExecution within a loop but is - // NOT a ParallelFor Head DAG - if iterations == nil { - tempSubTaskName := workingSubTaskName - if iterationIndex != nil { - // handle for parallel iteration dag, i.e one of the DAG - // instances of the loop. - tempSubTaskName = metadata.GetParallelForTaskName(tempSubTaskName, iterationIndex.GetIntValue()) - glog.V(4).Infof("subTaskIterationName: %v", tempSubTaskName) - } - glog.V(4).Infof("tempSubTaskName: %v", tempSubTaskName) - tasksToResolve = append(tasksToResolve, tempSubTaskName) - continue - } - - // If the currentTask is not a ContainerExecution AND we have the - // custom property set for "iteration_count", we can deduce that - // currentTask is in fact a ParallelFor Head DAG, thus we need to add - // its iteration DAGs to the queue. - - for i := range iterations.GetIntValue() { - loopName := metadata.GetTaskNameWithDagID(currentTask.TaskName(), currentTask.GetID()) - loopIterationName := metadata.GetParallelForTaskName(loopName, i) - glog.V(4).Infof("loopIterationName: %v", loopIterationName) - tasksToResolve = append(tasksToResolve, loopIterationName) - } - } - - outputParameterList = &structpb.Value{ - Kind: &structpb.Value_ListValue{ - ListValue: &structpb.ListValue{ - Values: parallelForParameterList, - }, - }, - } - outputArtifactList = &pipelinespec.ArtifactList{ - Artifacts: parallelForArtifactList, - } - glog.V(4).Infof("outputParameterList: %#v", outputParameterList) - glog.V(4).Infof("outputArtifactList: %#v", outputArtifactList) - return outputParameterList, outputArtifactList, nil -} - -// collectContainerOutput pulls either the artifact or parameter that is a -// task's output where said task was called within a parallelFor loop -func collectContainerOutput( - cfg resolveUpstreamOutputsConfig, - currentTask *metadata.Execution, - workingOutputKey string, - isArtifact bool, -) (*structpb.Value, *pipelinespec.RuntimeArtifact, error) { - var param *structpb.Value - var artifact *pipelinespec.RuntimeArtifact - if isArtifact { - outputArtifacts, err := cfg.mlmd.GetOutputArtifactsByExecutionId(cfg.ctx, currentTask.GetID()) - if err != nil { - return nil, nil, err - } - glog.V(4).Infof("outputArtifacts: %#v", outputArtifacts) - glog.V(4).Infof("outputKey: %v", workingOutputKey) - artifact, err = outputArtifacts[workingOutputKey].ToRuntimeArtifact() - if err != nil { - return nil, nil, cfg.err(err) - } - glog.V(4).Infof("runtimeArtifact: %v", artifact) - } else { - _, outputParameters, err := currentTask.GetParameters() - glog.V(4).Infof("outputParameters: %v", outputParameters) - if err != nil { - return nil, nil, cfg.err(err) - } - param = outputParameters[workingOutputKey] - } - return param, artifact, nil -} - -// GetProducerTask gets the updated ProducerSubTask / -// Output[Artifact|Parameter]Key if they exists, else it returns the original -// input. 
-func GetProducerTask(parentTask *metadata.Execution, tasks map[string]*metadata.Execution, subTaskName string, outputKey string, isArtifact bool) (producerSubTaskName string, tempOutputKey string, err error) { - tempOutputKey = outputKey - if isArtifact { - producerTaskValue := parentTask.GetExecution().GetCustomProperties()["artifact_producer_task"] - if producerTaskValue != nil { - var tempOutputArtifacts map[string]*pipelinespec.DagOutputsSpec_DagOutputArtifactSpec - err := json.Unmarshal([]byte(producerTaskValue.GetStringValue()), &tempOutputArtifacts) - if err != nil { - return "", "", err - } - glog.V(4).Infof("tempOutputsArtifacts: %v", tempOutputArtifacts) - glog.V(4).Infof("outputArtifactKey: %v", outputKey) - tempSelectors := tempOutputArtifacts[outputKey].GetArtifactSelectors() - if len(tempSelectors) > 0 { - producerSubTaskName = tempSelectors[len(tempSelectors)-1].ProducerSubtask - tempOutputKey = tempSelectors[len(tempSelectors)-1].OutputArtifactKey - } - } - - } else { - producerTaskValue := parentTask.GetExecution().GetCustomProperties()["parameter_producer_task"] - if producerTaskValue != nil { - tempOutputParametersMap := make(map[string]*pipelinespec.DagOutputsSpec_DagOutputParameterSpec) - for name, value := range producerTaskValue.GetStructValue().GetFields() { - outputSpec := &pipelinespec.DagOutputsSpec_DagOutputParameterSpec{} - err := protojson.Unmarshal([]byte(value.GetStringValue()), outputSpec) - if err != nil { - return "", "", err - } - tempOutputParametersMap[name] = outputSpec - } - glog.V(4).Infof("tempOutputParametersMap: %#v", tempOutputParametersMap) - switch tempOutputParametersMap[tempOutputKey].Kind.(type) { - case *pipelinespec.DagOutputsSpec_DagOutputParameterSpec_ValueFromParameter: - producerSubTaskName = tempOutputParametersMap[tempOutputKey].GetValueFromParameter().GetProducerSubtask() - tempOutputKey = tempOutputParametersMap[tempOutputKey].GetValueFromParameter().GetOutputParameterKey() - case *pipelinespec.DagOutputsSpec_DagOutputParameterSpec_ValueFromOneof: - // When OneOf is specified in a pipeline, the output of only 1 - // task is consumed even though there may be more than 1 task - // output set. In this case we will attempt to grab the first - // successful task output. - paramSelectors := tempOutputParametersMap[tempOutputKey].GetValueFromOneof().GetParameterSelectors() - glog.V(4).Infof("paramSelectors: %v", paramSelectors) - // Since we have the tasks map, we can iterate through the - // parameterSelectors if the ProducerSubTask is not present in - // the task map and then assign the new OutputParameterKey only - // if it exists. 
- successfulOneOfTask := false - for _, paramSelector := range paramSelectors { - producerSubTaskName = paramSelector.GetProducerSubtask() - // Used just for retrieval since we lookup the task in the map - updatedSubTaskName := metadata.GetTaskNameWithDagID(producerSubTaskName, parentTask.GetID()) - glog.V(4).Infof("subTaskName with Dag ID from paramSelector: %v", updatedSubTaskName) - glog.V(4).Infof("outputParameterKey from paramSelector: %v", paramSelector.GetOutputParameterKey()) - if subTask, ok := tasks[updatedSubTaskName]; ok { - subTaskState := subTask.GetExecution().GetLastKnownState().String() - glog.V(4).Infof("subTask: %w , subTaskState: %v", updatedSubTaskName, subTaskState) - if subTaskState == "CACHED" || subTaskState == "COMPLETE" { - tempOutputKey = paramSelector.GetOutputParameterKey() - successfulOneOfTask = true - break - } - } - } - if !successfulOneOfTask { - return "", "", fmt.Errorf("processing OneOf: No successful task found") - } - } - } - } - if producerSubTaskName != "" { - producerSubTaskName = metadata.GetTaskNameWithDagID(producerSubTaskName, parentTask.GetID()) - } else { - producerSubTaskName = subTaskName - } - return producerSubTaskName, tempOutputKey, nil -} - -// Helper for determining if the current producerTask in question needs to pull from an iteration dag that it may exist in. -func InferIndexedTaskName(producerTaskName string, dag *metadata.Execution) string { - // Check if the DAG in question is a parallelFor iteration DAG. If it is, we need to - // update the producerTaskName so the downstream task resolves the appropriate index. - if dag.GetExecution().GetCustomProperties()["iteration_index"] != nil { - task_iteration_index := dag.GetExecution().GetCustomProperties()["iteration_index"].GetIntValue() - producerTaskName = metadata.GetParallelForTaskName(producerTaskName, task_iteration_index) - glog.V(4).Infof("TaskIteration - ProducerTaskName: %v", producerTaskName) - glog.Infof("Attempting to retrieve outputs from a ParallelFor iteration") - } - return producerTaskName -} - -// Helper for checking if collecting outputs is required for downstream tasks. -func getParallelForIterationCount(task *metadata.Execution, dag *metadata.Execution) int64 { - iterations := task.GetExecution().GetCustomProperties()["iteration_count"] - glog.V(4).Infof("task: %v, iterations: %v", task.TaskName(), iterations) - if iterations == nil { - glog.V(4).Infof("No iteration_count found on task %v, checking associated DAG", task.TaskName()) - iterations = dag.GetExecution().GetCustomProperties()["iteration_count"] - glog.V(4).Infof("dag: %v, iterations: %v", dag.TaskName(), iterations) - } - return iterations.GetIntValue() -} diff --git a/backend/src/v2/driver/resolver/artifacts.go b/backend/src/v2/driver/resolver/artifacts.go new file mode 100644 index 00000000000..035e1d9e1d9 --- /dev/null +++ b/backend/src/v2/driver/resolver/artifacts.go @@ -0,0 +1,328 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+
+// Package resolver provides parameter and artifact resolution for KFP v2 driver.
+package resolver
+
+import (
+	"fmt"
+
+	"github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec"
+	apiv2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client"
+	"github.com/kubeflow/pipelines/backend/src/common/util"
+	"github.com/kubeflow/pipelines/backend/src/v2/driver/common"
+)
+
+func resolveArtifacts(opts common.Options) ([]ArtifactMetadata, error) {
+	var artifacts []ArtifactMetadata
+
+	for key, artifactSpec := range opts.Task.GetInputs().GetArtifacts() {
+		v, ioType, err := resolveInputArtifact(opts, key, artifactSpec, opts.ParentTask.Inputs.GetArtifacts())
+		if err != nil {
+			return nil, err
+		}
+
+		producer := v.GetProducer()
+		if producer == nil {
+			return nil, fmt.Errorf("producer cannot be nil")
+		}
+
+		am := ArtifactMetadata{
+			Key:               key,
+			InputArtifactSpec: artifactSpec,
+			ArtifactIO: &apiv2beta1.PipelineTaskDetail_InputOutputs_IOArtifact{
+				Artifacts:   v.Artifacts,
+				Type:        ioType,
+				ArtifactKey: key,
+				Producer:    producer,
+			},
+		}
+		if opts.IterationIndex >= 0 {
+			am.ArtifactIO.Producer.Iteration = util.Int64Pointer(int64(opts.IterationIndex))
+		}
+		artifacts = append(artifacts, am)
+	}
+
+	return artifacts, nil
+}
+
+func resolveInputArtifact(
+	opts common.Options,
+	name string,
+	artifactSpec *pipelinespec.TaskInputsSpec_InputArtifactSpec,
+	inputArtifacts []*apiv2beta1.PipelineTaskDetail_InputOutputs_IOArtifact,
+) (*apiv2beta1.PipelineTaskDetail_InputOutputs_IOArtifact, apiv2beta1.IOType, error) {
+	artifactError := func(err error) error {
+		return fmt.Errorf("failed to resolve input artifact %s with spec %s: %w", name, artifactSpec, err)
+	}
+	switch t := artifactSpec.Kind.(type) {
+	case *pipelinespec.TaskInputsSpec_InputArtifactSpec_ComponentInputArtifact:
+		artifactIO, err := resolveComponentInputArtifact(opts, artifactSpec, inputArtifacts)
+		if err != nil {
+			return nil, apiv2beta1.IOType_COMPONENT_INPUT, artifactError(err)
+		}
+		return artifactIO, apiv2beta1.IOType_COMPONENT_INPUT, nil
+	case *pipelinespec.TaskInputsSpec_InputArtifactSpec_TaskOutputArtifact:
+		artifact, err := resolveTaskOutputArtifact(opts, artifactSpec)
+		if err != nil {
+			return nil, apiv2beta1.IOType_TASK_OUTPUT_INPUT, err
+		}
+		ioType := apiv2beta1.IOType_TASK_OUTPUT_INPUT
+		if artifact.GetType() == apiv2beta1.IOType_COLLECTED_INPUTS {
+			ioType = apiv2beta1.IOType_COLLECTED_INPUTS
+		}
+		return artifact, ioType, nil
+	default:
+		return nil, apiv2beta1.IOType_UNSPECIFIED, artifactError(fmt.Errorf("artifact spec of type %T not implemented yet", t))
+	}
+}
+
+// resolveComponentInputArtifact resolves a component input artifact by its key
+// from the parent task's input artifacts.
+func resolveComponentInputArtifact(
+	opts common.Options,
+	artifactSpec *pipelinespec.TaskInputsSpec_InputArtifactSpec,
+	inputArtifactsIO []*apiv2beta1.PipelineTaskDetail_InputOutputs_IOArtifact,
+) (*apiv2beta1.PipelineTaskDetail_InputOutputs_IOArtifact, error) {
+	key := artifactSpec.GetComponentInputArtifact()
+	if key == "" {
+		return nil, fmt.Errorf("empty component input")
+	}
+
+	for _, artifactIO := range inputArtifactsIO {
+		ioKey := artifactIO.GetArtifactKey()
+		if key == ioKey {
+			if !common.IsLoopArgument(key) {
+				return artifactIO, nil
+			}
+			// For loop arguments, prefer the entry whose producer iteration
+			// matches the current iteration index.
+			if artifactIO.Producer != nil && artifactIO.Producer.Iteration != nil && *artifactIO.Producer.Iteration == int64(opts.IterationIndex) {
+				return artifactIO, nil
+			}
+			// Fall back to the first key match when no iteration-specific
+			// entry applies.
+			return artifactIO, nil
+		}
+	}
+	return nil, fmt.Errorf("failed to find input artifact %s", key)
+}
+
+func resolveTaskOutputArtifact(
+	opts common.Options,
+	spec 
*pipelinespec.TaskInputsSpec_InputArtifactSpec, +) (*apiv2beta1.PipelineTaskDetail_InputOutputs_IOArtifact, error) { + tasks, err := getSubTasks(opts.ParentTask, opts.Run.Tasks, nil) + if err != nil { + return nil, err + } + if tasks == nil { + return nil, fmt.Errorf("failed to get sub tasks for task %s", opts.ParentTask.Name) + } + producerTaskAmbiguousName := spec.GetTaskOutputArtifact().GetProducerTask() + if producerTaskAmbiguousName == "" { + return nil, fmt.Errorf("producerTask task cannot be empty") + } + producerTaskUniqueName := getTaskNameWithTaskID(producerTaskAmbiguousName, opts.ParentTask.GetTaskId()) + if opts.IterationIndex >= 0 { + producerTaskUniqueName = getTaskNameWithIterationIndex(producerTaskUniqueName, int64(opts.IterationIndex)) + } + // producerTask is the specific task guaranteed to have the output artifact + // producerTaskUniqueName may look something like "task_name_a_dag_id_1_idx_0" + producerTask := tasks[producerTaskUniqueName] + if producerTask == nil { + return nil, fmt.Errorf("producerTask task %s not found", producerTaskUniqueName) + } + outputKey := spec.GetTaskOutputArtifact().GetOutputArtifactKey() + outputs := producerTask.GetOutputs().GetArtifacts() + outputIO, err := findArtifactByProducerKeyInList(outputKey, producerTask.GetName(), outputs) + if err != nil { + return nil, err + } + if outputIO == nil { + return nil, fmt.Errorf("output artifact %s not found", outputKey) + } + return outputIO, nil +} + +// resolveArtifactIterator handles Artifact Iterator Input resolution +func resolveArtifactIterator( + opts common.Options, + artifacts []ArtifactMetadata, +) ([]ArtifactMetadata, *int, error) { + artifactIterator := opts.Task.GetArtifactIterator() + // This should be the key input into the for loop task + iteratorInputDefinitionKey := artifactIterator.GetItemInput() + // Used to look up the Artifact from the resolved list + // The key here should map to a ArtifactMetadata.Key that + // was resolved in the prior loop. + sourceInputArtifactKey := artifactIterator.GetItems().GetInputArtifact() + artifactIO, err := findArtifactByIOKey(sourceInputArtifactKey, artifacts) + if err != nil { + return nil, nil, err + } + + var artifactMetadataList []ArtifactMetadata + for i, artifact := range artifactIO.Artifacts { + am := ArtifactMetadata{ + Key: iteratorInputDefinitionKey, + ArtifactIO: &apiv2beta1.PipelineTaskDetail_InputOutputs_IOArtifact{ + // Iteration over artifact lists is not supported yet. + Artifacts: []*apiv2beta1.Artifact{artifact}, + Type: apiv2beta1.IOType_ITERATOR_INPUT, + ArtifactKey: iteratorInputDefinitionKey, + Producer: &apiv2beta1.IOProducer{ + TaskName: opts.ParentTask.Name, + Iteration: util.Int64Pointer(int64(i)), + }, + }, + ArtifactIterator: artifactIterator, + } + artifactMetadataList = append(artifactMetadataList, am) + } + + count := len(artifactIO.Artifacts) + return artifactMetadataList, &count, nil +} + +// generateUniqueTaskName generates a unique task name for a given task. 
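+// For example (illustrative values only), a task named "task_name_a" whose
+// parent task ID is "dag_id_1" yields "task_name_a_dag_id_1", and
+// "task_name_a_dag_id_1_idx_0" when it is iteration 0 of a loop:
+//
+//	name := getTaskNameWithTaskID("task_name_a", "dag_id_1") // "task_name_a_dag_id_1"
+//	name = getTaskNameWithIterationIndex(name, 0)            // "task_name_a_dag_id_1_idx_0"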
+func generateUniqueTaskName(task, parentTask *apiv2beta1.PipelineTaskDetail) (string, error) {
+	if task == nil || task.Name == "" || parentTask == nil {
+		return "", fmt.Errorf("parentTask and task cannot be nil, and the task name cannot be empty")
+	}
+	taskName := getTaskNameWithTaskID(task.Name, parentTask.TaskId)
+	if common.IsRuntimeIterationTask(task) {
+		if task.TypeAttributes == nil || task.TypeAttributes.IterationIndex == nil {
+			return "", fmt.Errorf("iteration index cannot be nil for loop iteration")
+		}
+		taskName = getTaskNameWithIterationIndex(taskName, *task.TypeAttributes.IterationIndex)
+	} else if common.IsRuntimeIterationTask(parentTask) {
+		if parentTask.TypeAttributes == nil || parentTask.TypeAttributes.IterationIndex == nil {
+			return "", fmt.Errorf("iteration index cannot be nil for loop iteration")
+		}
+		taskName = getTaskNameWithIterationIndex(taskName, *parentTask.TypeAttributes.IterationIndex)
+	}
+	return taskName, nil
+}
+
+func getChildTasks(
+	tasks []*apiv2beta1.PipelineTaskDetail,
+	parentTask *apiv2beta1.PipelineTaskDetail,
+) (map[string]*apiv2beta1.PipelineTaskDetail, error) {
+	if parentTask == nil {
+		return nil, fmt.Errorf("parent task cannot be nil")
+	}
+	var taskMap = make(map[string]*apiv2beta1.PipelineTaskDetail)
+	for _, task := range tasks {
+		if task.GetParentTaskId() == parentTask.GetTaskId() {
+			taskName, err := generateUniqueTaskName(task, parentTask)
+			if err != nil {
+				return nil, err
+			}
+			if taskName == "" {
+				return nil, fmt.Errorf("task name cannot be empty")
+			}
+			taskMap[taskName] = task
+		}
+	}
+	return taskMap, nil
+}
+
+// getSubTasks creates a map of all subtasks under currentTask.
+// The keys of the map are formed by concatenating the task name with the task id.
+// If the task is a runtime iteration task, then the key is formed by concatenating
+// the task name with the iteration index.
+// So you may end up with a map of the form:
+//
+// {
+// "task_name_a_dag_id_1_idx_0": {
+// ...
+// },
+// "task_name_a_dag_id_1_idx_1": {
+// ...
+// },
+// "task_name_b_dag_id_2": {
+// ...
+// }, +// }, +func getSubTasks( + currentTask *apiv2beta1.PipelineTaskDetail, + allRuntasks []*apiv2beta1.PipelineTaskDetail, + flattenedTasks map[string]*apiv2beta1.PipelineTaskDetail, +) (map[string]*apiv2beta1.PipelineTaskDetail, error) { + if flattenedTasks == nil { + flattenedTasks = make(map[string]*apiv2beta1.PipelineTaskDetail) + } + taskChildren, err := getChildTasks(allRuntasks, currentTask) + if err != nil { + return nil, fmt.Errorf("failed to get child tasks for task %s: %w", currentTask.Name, err) + } + for taskName, task := range taskChildren { + flattenedTasks[taskName] = task + } + for _, task := range taskChildren { + if task.Type != apiv2beta1.PipelineTaskDetail_RUNTIME { + flattenedTasks, err = getSubTasks(task, allRuntasks, flattenedTasks) + if err != nil { + return nil, err + } + } + } + return flattenedTasks, nil +} + +func getTaskNameWithIterationIndex(taskName string, iterationIndex int64) string { + return fmt.Sprintf("%s_idx_%d", taskName, iterationIndex) +} + +func getTaskNameWithTaskID(taskName, taskID string) string { + return fmt.Sprintf("%s_%s", taskName, taskID) +} + +func findArtifactByProducerKeyInList( + producerKey, producerTaskName string, + artifactsIO []*apiv2beta1.PipelineTaskDetail_InputOutputs_IOArtifact, +) (*apiv2beta1.PipelineTaskDetail_InputOutputs_IOArtifact, error) { + var artifactIOList []*apiv2beta1.PipelineTaskDetail_InputOutputs_IOArtifact + for _, artifactIO := range artifactsIO { + if artifactIO.GetArtifactKey() == producerKey { + artifactIOList = append(artifactIOList, artifactIO) + } + } + if len(artifactIOList) == 0 { + return nil, fmt.Errorf("artifact with producer key %s not found", producerKey) + } + + // This occurs in the parallelFor case, where multiple iterations resulted in the same + // producer key. + isCollection := len(artifactIOList) > 1 + if isCollection { + var artifacts []*apiv2beta1.Artifact + for _, artifactIO := range artifactIOList { + // Check correctness by validating the type of all parameters + if artifactIO.Type != apiv2beta1.IOType_ITERATOR_OUTPUT { + return nil, fmt.Errorf("encountered a non iterator output that has the same producer key (%s)", producerKey) + } + // Support for an iterator over list of artifacts is not supported yet. + artifacts = append(artifacts, artifactIO.Artifacts[0]) + } + ioType := apiv2beta1.IOType_COLLECTED_INPUTS + newArtifactIO := &apiv2beta1.PipelineTaskDetail_InputOutputs_IOArtifact{ + Artifacts: artifacts, + Type: ioType, + ArtifactKey: producerKey, + // This is unused by the caller + Producer: &apiv2beta1.IOProducer{ + TaskName: producerTaskName, + }, + } + return newArtifactIO, nil + } + return artifactIOList[0], nil +} diff --git a/backend/src/v2/driver/resolver/parameters.go b/backend/src/v2/driver/resolver/parameters.go new file mode 100644 index 00000000000..6352a612495 --- /dev/null +++ b/backend/src/v2/driver/resolver/parameters.go @@ -0,0 +1,572 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package resolver + +import ( + "encoding/json" + "errors" + "fmt" + "strings" + + "github.com/golang/glog" + "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" + apiv2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" + "github.com/kubeflow/pipelines/backend/src/common/util" + "github.com/kubeflow/pipelines/backend/src/v2/component" + "github.com/kubeflow/pipelines/backend/src/v2/driver/common" + "google.golang.org/genproto/googleapis/rpc/status" + "google.golang.org/protobuf/encoding/protojson" + "google.golang.org/protobuf/types/known/structpb" +) + +func resolveParameters(opts common.Options) ([]ParameterMetadata, error) { + var parameters []ParameterMetadata + for key, paramSpec := range opts.Task.GetInputs().GetParameters() { + if compParam := opts.Component.GetInputDefinitions().GetParameters()[key]; compParam != nil { + // Skip resolving dsl.TaskConfig because that information is only available after initPodSpecPatch and + // extendPodSpecPatch are called. + if compParam.GetParameterType() == pipelinespec.ParameterType_TASK_CONFIG { + continue + } + } + + v, ioType, err := ResolveInputParameter(opts, paramSpec, opts.ParentTask.Inputs.GetParameters()) + if err != nil { + if !errors.Is(err, ErrResolvedParameterNull) { + return nil, err + } + componentParam, ok := opts.Component.GetInputDefinitions().GetParameters()[key] + if ok && componentParam != nil && componentParam.IsOptional { + // If the resolved parameter was null and the component input parameter is optional, + // check if there's a default value we should use + if componentParam.GetDefaultValue() != nil { + // Add parameter with default value + pm := ParameterMetadata{ + Key: key, + InputParameterSpec: paramSpec, + ParameterIO: &apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{ + Value: componentParam.GetDefaultValue(), + Type: apiv2beta1.IOType_COMPONENT_DEFAULT_INPUT, + ParameterKey: key, + Producer: &apiv2beta1.IOProducer{ + TaskName: opts.ParentTask.GetName(), + }, + }, + } + if opts.IterationIndex >= 0 { + pm.ParameterIO.Producer.Iteration = util.Int64Pointer(int64(opts.IterationIndex)) + } + parameters = append(parameters, pm) + continue + } + // No default value, skip it + continue + } + return nil, err + } + + producer := v.GetProducer() + if producer == nil { + return nil, fmt.Errorf("producer cannot be nil") + } + + pm := ParameterMetadata{ + Key: key, + InputParameterSpec: paramSpec, + ParameterIO: &apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{ + Value: v.GetValue(), + Type: ioType, + ParameterKey: key, + Producer: producer, + }, + } + if opts.IterationIndex >= 0 { + pm.ParameterIO.Producer.Iteration = util.Int64Pointer(int64(opts.IterationIndex)) + } + parameters = append(parameters, pm) + } + + // Check for parameters in the Component's InputDefinitions that aren't in the task's inputs + // and add them with their default values if they have one + if opts.Component.GetInputDefinitions() != nil { + // Build a map of existing parameter keys + existingParams := make(map[string]bool) + for key := range opts.Task.GetInputs().GetParameters() { + existingParams[key] = true + } + + // Find default parameters that aren't already in the task + for name, paramSpec := range opts.Component.GetInputDefinitions().GetParameters() { + // Skip if parameter is already in the task's inputs or doesn't have a default value + if existingParams[name] || paramSpec.GetDefaultValue() == nil { + continue + } + + // Skip TASK_CONFIG parameters + if paramSpec.GetParameterType() == 
pipelinespec.ParameterType_TASK_CONFIG { + continue + } + + // Only add if it's optional + if !paramSpec.IsOptional { + continue + } + + // Add parameter with default value + pm := ParameterMetadata{ + Key: name, + ParameterIO: &apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{ + Value: paramSpec.GetDefaultValue(), + Type: apiv2beta1.IOType_COMPONENT_DEFAULT_INPUT, + ParameterKey: name, + Producer: &apiv2beta1.IOProducer{ + TaskName: opts.ParentTask.GetName(), + }, + }, + } + if opts.IterationIndex >= 0 { + pm.ParameterIO.Producer.Iteration = util.Int64Pointer(int64(opts.IterationIndex)) + } + parameters = append(parameters, pm) + } + } + + return parameters, nil +} + +func ResolveInputParameter( + opts common.Options, + paramSpec *pipelinespec.TaskInputsSpec_InputParameterSpec, + inputParams []*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter, +) (*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter, apiv2beta1.IOType, error) { + switch t := paramSpec.Kind.(type) { + case *pipelinespec.TaskInputsSpec_InputParameterSpec_ComponentInputParameter: + glog.V(4).Infof("resolving component input parameter %s", paramSpec.GetComponentInputParameter()) + resolvedInput, err := resolveParameterComponentInputParameter(opts, paramSpec, inputParams) + if err != nil { + return nil, apiv2beta1.IOType_COMPONENT_INPUT, err + } + return resolvedInput, apiv2beta1.IOType_COMPONENT_INPUT, nil + case *pipelinespec.TaskInputsSpec_InputParameterSpec_TaskOutputParameter: + parameter, err := resolveTaskOutputParameter(opts, paramSpec) + if err != nil { + return nil, apiv2beta1.IOType_TASK_OUTPUT_INPUT, err + } + ioType := apiv2beta1.IOType_TASK_OUTPUT_INPUT + if parameter.GetType() == apiv2beta1.IOType_COLLECTED_INPUTS { + ioType = apiv2beta1.IOType_COLLECTED_INPUTS + } + return parameter, ioType, nil + case *pipelinespec.TaskInputsSpec_InputParameterSpec_RuntimeValue: + glog.V(4).Infof("resolving runtime value %s", paramSpec.GetRuntimeValue().String()) + runtimeValue := paramSpec.GetRuntimeValue() + switch t := runtimeValue.Value.(type) { + case *pipelinespec.ValueOrRuntimeParameter_Constant: + val := runtimeValue.GetConstant() + valStr := val.GetStringValue() + var v *structpb.Value + if strings.Contains(valStr, "{{$.workspace_path}}") { + v = structpb.NewStringValue(strings.ReplaceAll(valStr, "{{$.workspace_path}}", component.WorkspaceMountPath)) + ioParameter := &apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{ + ParameterKey: "", + Value: v, + Producer: &apiv2beta1.IOProducer{ + TaskName: opts.ParentTask.GetName(), + }, + } + return ioParameter, apiv2beta1.IOType_RUNTIME_VALUE_INPUT, nil + } + switch valStr { + case "{{$.pipeline_job_name}}": + v = structpb.NewStringValue(opts.RunDisplayName) + case "{{$.pipeline_job_resource_name}}": + v = structpb.NewStringValue(opts.RunName) + case "{{$.pipeline_job_uuid}}": + v = structpb.NewStringValue(opts.Run.GetRunId()) + case "{{$.pipeline_task_name}}": + v = structpb.NewStringValue(opts.TaskName) + case "{{$.pipeline_task_uuid}}": + if opts.ParentTask == nil { + return nil, apiv2beta1.IOType_UNSPECIFIED, fmt.Errorf("parent task should not be nil") + } + v = structpb.NewStringValue(opts.ParentTask.GetTaskId()) + default: + v = val + } + // When a constant runtime value is a pipeline channel, then we expect the source value to be found + // via the pipeline channel's key within this task's inputs. 
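+			// As an illustrative example, a constant whose string value names a
+			// pipeline channel (e.g. "pipelinechannel--upstream-task-Output") is
+			// treated as a reference rather than a literal, and is re-resolved
+			// against this task's input parameter specs below.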
+ isMatch, isPipelineChannel, paramName := common.ParsePipelineParam(v.GetStringValue()) + if isMatch && isPipelineChannel { + channelParamSpec, ok := opts.Task.Inputs.GetParameters()[paramName] + if !ok { + return nil, apiv2beta1.IOType_RUNTIME_VALUE_INPUT, fmt.Errorf("pipeline channel %s not found in task %s", v.GetStringValue(), opts.TaskName) + } + return ResolveInputParameter(opts, channelParamSpec, inputParams) + } + + ioParameter := &apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{ + ParameterKey: "", + Value: v, + Producer: &apiv2beta1.IOProducer{ + TaskName: opts.ParentTask.GetName(), + }, + } + return ioParameter, apiv2beta1.IOType_RUNTIME_VALUE_INPUT, nil + default: + return nil, apiv2beta1.IOType_UNSPECIFIED, paramError(paramSpec, fmt.Errorf("param runtime value spec of type %T not implemented", t)) + } + case *pipelinespec.TaskInputsSpec_InputParameterSpec_TaskFinalStatus_: + value, err := resolveTaskFinalStatus(opts, paramSpec) + if err != nil { + return nil, apiv2beta1.IOType_TASK_FINAL_STATUS_OUTPUT, err + } + return &apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{ + ParameterKey: "status", + Value: value, + Producer: &apiv2beta1.IOProducer{ + TaskName: opts.ParentTask.GetName(), + }, + }, apiv2beta1.IOType_TASK_FINAL_STATUS_OUTPUT, nil + default: + return nil, apiv2beta1.IOType_UNSPECIFIED, paramError(paramSpec, fmt.Errorf("parameter spec of type %T not implemented yet", t)) + } +} + +func resolveTaskOutputParameter( + opts common.Options, + spec *pipelinespec.TaskInputsSpec_InputParameterSpec, +) (*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter, error) { + tasks, err := getSubTasks(opts.ParentTask, opts.Run.Tasks, nil) + if err != nil { + return nil, err + } + if tasks == nil { + return nil, fmt.Errorf("failed to get sub tasks for task %s", opts.ParentTask.Name) + } + producerTaskAmbiguousName := spec.GetTaskOutputParameter().GetProducerTask() + if producerTaskAmbiguousName == "" { + return nil, fmt.Errorf("producerTask task cannot be empty") + } + producerTaskUniqueName := getTaskNameWithTaskID(producerTaskAmbiguousName, opts.ParentTask.GetTaskId()) + if opts.IterationIndex >= 0 { + producerTaskUniqueName = getTaskNameWithIterationIndex(producerTaskUniqueName, int64(opts.IterationIndex)) + } + // producerTask is the specific task guaranteed to have the output parameter + // producerTaskUniqueName may look something like "task_name_a_dag_id_1_idx_0" + producerTask := tasks[producerTaskUniqueName] + if producerTask == nil { + return nil, fmt.Errorf("producerTask task %s not found", producerTaskUniqueName) + } + outputKey := spec.GetTaskOutputParameter().GetOutputParameterKey() + outputs := producerTask.GetOutputs().GetParameters() + outputIO, err := findParameterByProducerKeyInList(outputKey, producerTask.GetName(), outputs) + if err != nil { + return nil, err + } + if outputIO == nil { + return nil, fmt.Errorf("output parameter %s not found", outputKey) + } + return outputIO, nil +} + +func resolveParameterComponentInputParameter( + opts common.Options, + paramSpec *pipelinespec.TaskInputsSpec_InputParameterSpec, + inputParams []*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter, +) (*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter, error) { + paramName := paramSpec.GetComponentInputParameter() + if paramName == "" { + return nil, paramError(paramSpec, fmt.Errorf("empty component input")) + } + for _, param := range inputParams { + generateName := param.ParameterKey + if paramName == generateName { + if !common.IsLoopArgument(paramName) 
{
+				// This can occur when a runtime config has a "None" optional value.
+				// In this case we return "nil" and have the caller handle the
+				// ErrResolvedParameterNull case.
+				val := param.GetValue()
+				if val == nil {
+					return nil, ErrResolvedParameterNull
+				}
+				if _, isNull := val.GetKind().(*structpb.Value_NullValue); isNull {
+					return nil, ErrResolvedParameterNull
+				}
+				return param, nil
+			}
+			// If the input is a loop argument, we need to check if the iteration index matches the current iteration.
+			if param.Producer != nil && param.Producer.Iteration != nil && *param.Producer.Iteration == int64(opts.IterationIndex) {
+				return param, nil
+			}
+		}
+	}
+	return nil, ErrResolvedParameterNull
+}
+
+// resolveParameterIterator handles parameter Iterator Input resolution.
+func resolveParameterIterator(
+	opts common.Options,
+	parameters []ParameterMetadata,
+) ([]ParameterMetadata, *int, error) {
+	var value *structpb.Value
+	var iteratorInputDefinitionKey string
+	var iterator = opts.Task.GetParameterIterator()
+	switch iterator.GetItems().GetKind().(type) {
+	case *pipelinespec.ParameterIteratorSpec_ItemsSpec_InputParameter:
+		// This should be the key input into the for loop task.
+		iteratorInputDefinitionKey = iterator.GetItemInput()
+		// Used to look up the Parameter from the resolved list.
+		// The key here should map to a ParameterMetadata.Key that
+		// was resolved in the prior loop.
+		sourceInputParameterKey := iterator.GetItems().GetInputParameter()
+
+		parameterIO, err := findParameterByIOKey(sourceInputParameterKey, parameters)
+		if err != nil {
+			return nil, nil, err
+		}
+		value = parameterIO.GetValue()
+	case *pipelinespec.ParameterIteratorSpec_ItemsSpec_Raw:
+		valueRaw := iterator.GetItems().GetRaw()
+		var unmarshalledRaw interface{}
+		err := json.Unmarshal([]byte(valueRaw), &unmarshalledRaw)
+		if err != nil {
+			return nil, nil, fmt.Errorf("error unmarshalling raw string: %w", err)
+		}
+		value, err = structpb.NewValue(unmarshalledRaw)
+		if err != nil {
+			return nil, nil, fmt.Errorf("error converting unmarshalled raw string into protobuf Value type: %w", err)
+		}
+		iteratorInputDefinitionKey = iterator.GetItemInput()
+
+	default:
+		return nil, nil, fmt.Errorf("cannot find parameter iterator")
+	}
+
+	items, err := getItems(value)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	var parameterMetadataList []ParameterMetadata
+	for i, item := range items {
+		pm := ParameterMetadata{
+			Key: iteratorInputDefinitionKey,
+			ParameterIO: &apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{
+				Value:        item,
+				Type:         apiv2beta1.IOType_ITERATOR_INPUT,
+				ParameterKey: iteratorInputDefinitionKey,
+				Producer: &apiv2beta1.IOProducer{
+					TaskName:  opts.TaskName,
+					Iteration: util.Int64Pointer(int64(i)),
+				},
+			},
+			ParameterIterator: iterator,
+		}
+		parameterMetadataList = append(parameterMetadataList, pm)
+	}
+	count := len(items)
+	return parameterMetadataList, &count, nil
+}
+
+func resolveTaskFinalStatus(opts common.Options,
+	spec *pipelinespec.TaskInputsSpec_InputParameterSpec,
+) (*structpb.Value, error) {
+	tasks, err := getSubTasks(opts.ParentTask, opts.Run.Tasks, nil)
+	if err != nil {
+		return nil, err
+	}
+	if tasks == nil {
+		return nil, fmt.Errorf("failed to get sub tasks for task %s", opts.ParentTask.Name)
+	}
+	producerTaskAmbiguousName := spec.GetTaskFinalStatus().GetProducerTask()
+	if producerTaskAmbiguousName == "" {
+		return nil, fmt.Errorf("producerTask task cannot be empty")
+	}
+
+	producerTaskUniqueName := getTaskNameWithTaskID(producerTaskAmbiguousName, opts.ParentTask.GetTaskId())
+	producer, ok := tasks[producerTaskUniqueName]
+
+	if len(opts.Task.DependentTasks) == 0 {
+		return nil, fmt.Errorf("task %v has no dependent tasks", opts.Task.TaskInfo.GetName())
+	}
+	if !ok {
+		return nil, fmt.Errorf("producer task %s not found in tasks", producerTaskUniqueName)
+	}
+
+	finalStatus := pipelinespec.PipelineTaskFinalStatus{
+		State:                   producer.GetState().String(),
+		PipelineTaskName:        producer.GetName(),
+		PipelineJobResourceName: opts.RunName,
+		Error: &status.Status{
+			Message: producer.GetStatusMetadata().GetMessage(),
+			Code:    int32(producer.GetState().Number()),
+		},
+	}
+	finalStatusJSON, err := protojson.Marshal(&finalStatus)
+	if err != nil {
+		return nil, fmt.Errorf("failed to marshal PipelineTaskFinalStatus: %w", err)
+	}
+
+	var finalStatusMap map[string]interface{}
+	if err := json.Unmarshal(finalStatusJSON, &finalStatusMap); err != nil {
+		return nil, fmt.Errorf("failed to unmarshal JSON of PipelineTaskFinalStatus: %w", err)
+	}
+
+	finalStatusStruct, err := structpb.NewStruct(finalStatusMap)
+	if err != nil {
+		return nil, fmt.Errorf("failed to create structpb.Struct: %w", err)
+	}
+
+	return structpb.NewStructValue(finalStatusStruct), nil
+}
+
+// getItems extracts the iteration items from a structpb.Value.
+// The return value may be:
+// * a list of JSON serializable structs
+// * a list of structpb.Value
+func getItems(value *structpb.Value) (items []*structpb.Value, err error) {
+	switch v := value.GetKind().(type) {
+	case *structpb.Value_ListValue:
+		return v.ListValue.GetValues(), nil
+	case *structpb.Value_StringValue:
+		listValue := structpb.Value{}
+		if err = listValue.UnmarshalJSON([]byte(v.StringValue)); err != nil {
+			return nil, err
+		}
+		return listValue.GetListValue().GetValues(), nil
+	default:
+		return nil, fmt.Errorf("value of type %T cannot be iterated", v)
+	}
+}
+
+// ToListValue converts []*structpb.Value into a *structpb.Value that holds a ListValue.
+func ToListValue(items []*structpb.Value) *structpb.Value {
+	listValue := structpb.Value{}
+	listValue.Kind = &structpb.Value_ListValue{
+		ListValue: &structpb.ListValue{
+			Values: items,
+		},
+	}
+	return &listValue
+}
+
+// ResolveParameterOrPipelineChannel resolves a parameter or pipeline channel value using the executor input.
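+// The behavior, as a hedged sketch: a plain literal passes through unchanged,
+// while a channel reference is looked up in the executor input's parameter
+// values by its parsed name. For example (names assumed):
+//
+//	v, _ := ResolveParameterOrPipelineChannel("some-literal", executorInput) // returns "some-literal"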
+func ResolveParameterOrPipelineChannel(parameterValueOrPipelineChannel string, executorInput *pipelinespec.ExecutorInput) (string, error) {
+	isMatch, isPipelineChannel, paramName := common.ParsePipelineParam(parameterValueOrPipelineChannel)
+	if isMatch && isPipelineChannel {
+		value, ok := executorInput.GetInputs().GetParameterValues()[paramName]
+		if !ok {
+			return "", fmt.Errorf("pipeline channel %s not found in executor input", parameterValueOrPipelineChannel)
+		}
+		return value.GetStringValue(), nil
+	}
+
+	return parameterValueOrPipelineChannel, nil
+}
+
+func ResolveK8sJSONParameter[k8sResource any](
+	opts common.Options,
+	parameter *pipelinespec.TaskInputsSpec_InputParameterSpec,
+	params []*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter,
+	res *k8sResource,
+) error {
+	resolvedParam, _, err := ResolveInputParameter(opts, parameter, params)
+	if err != nil {
+		return fmt.Errorf("failed to resolve k8s parameter: %w", err)
+	}
+	if resolvedParam == nil || resolvedParam.GetValue() == nil || resolvedParam.GetValue().GetStructValue() == nil {
+		return fmt.Errorf("resolved k8s parameter is nil")
+	}
+	paramJSON, err := resolvedParam.GetValue().GetStructValue().MarshalJSON()
+	if err != nil {
+		return err
+	}
+	err = json.Unmarshal(paramJSON, res)
+	if err != nil {
+		return fmt.Errorf("failed to unmarshal k8s Resource json; "+
+			"ensure that the k8s Resource json correctly adheres to its respective k8s spec: %w", err)
+	}
+	return nil
+}
+
+// ResolveInputParameterStr is like ResolveInputParameter but returns an error if the resolved value is not a non-empty
+// string.
+func ResolveInputParameterStr(
+	opts common.Options,
+	parameter *pipelinespec.TaskInputsSpec_InputParameterSpec,
+	params []*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter) (*structpb.Value, error) {
+
+	val, _, err := ResolveInputParameter(opts, parameter, params)
+	if err != nil {
+		return nil, fmt.Errorf("failed to resolve input parameter: %w", err)
+	}
+	if val == nil || val.GetValue() == nil {
+		return nil, fmt.Errorf("resolving input parameter with spec %s. The resolved value is nil", parameter.String())
+	}
+	if typedVal, ok := val.GetValue().GetKind().(*structpb.Value_StringValue); ok && typedVal != nil {
+		if typedVal.StringValue == "" {
+			return nil, fmt.Errorf("resolving input parameter with spec %s. Expected a non-empty string", parameter.String())
+		}
+	} else {
+		return nil, fmt.Errorf(
+			"resolving input parameter with spec %s. Expected a string but got: %T",
+			parameter.String(),
+			val.GetValue().GetKind(),
+		)
+	}
+
+	return val.GetValue(), nil
+}
+
+func findParameterByProducerKeyInList(
+	producerKey, producerTaskName string,
+	parametersIO []*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter,
+) (*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter, error) {
+	var parameterIOList []*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter
+	for _, parameterIO := range parametersIO {
+		if parameterIO.GetParameterKey() == producerKey {
+			parameterIOList = append(parameterIOList, parameterIO)
+		}
+	}
+	if len(parameterIOList) == 0 {
+		return nil, fmt.Errorf("parameter with producer key %s not found", producerKey)
+	}
+
+	// This occurs in the parallelFor case, where multiple iterations resulted in the same
+	// producer key.
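+	// When that happens, every matching entry must be an ITERATOR_OUTPUT; the
+	// per-iteration values are folded below into a single list-typed parameter
+	// marked COLLECTED_INPUTS (e.g. iterations producing "a", "b", "c" are
+	// collected as ["a", "b", "c"]).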
+ isCollection := len(parameterIOList) > 1 + if isCollection { + var parameterValues []*structpb.Value + for _, parameterIO := range parameterIOList { + // Check correctness by validating the type of all parameters + if parameterIO.Type != apiv2beta1.IOType_ITERATOR_OUTPUT { + return nil, fmt.Errorf("encountered a non iterator output that has the same producer key (%s)", producerKey) + } + // Support for an iterator over list of parameters is not supported yet. + parameterValues = append(parameterValues, parameterIO.GetValue()) + } + ioType := apiv2beta1.IOType_COLLECTED_INPUTS + newParameterIO := &apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{ + Value: ToListValue(parameterValues), + Type: ioType, + ParameterKey: producerKey, + Producer: &apiv2beta1.IOProducer{ + TaskName: producerTaskName, + }, + } + return newParameterIO, nil + } + return parameterIOList[0], nil +} diff --git a/backend/src/v2/driver/resolver/resolve.go b/backend/src/v2/driver/resolver/resolve.go new file mode 100644 index 00000000000..c1bf9e20fa9 --- /dev/null +++ b/backend/src/v2/driver/resolver/resolve.go @@ -0,0 +1,105 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package resolver + +import ( + "context" + "errors" + "fmt" + + "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" + apiV2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" + "github.com/kubeflow/pipelines/backend/src/v2/driver/common" +) + +var paramError = func(paramSpec *pipelinespec.TaskInputsSpec_InputParameterSpec, err error) error { + return fmt.Errorf("resolving input parameter with spec %s: %w", paramSpec, err) +} + +var ErrResolvedParameterNull = errors.New("the resolved input is null") + +type ParameterMetadata struct { + // This is the key of the parameter in this task's inputs. + Key string + ParameterIO *apiV2beta1.PipelineTaskDetail_InputOutputs_IOParameter + InputParameterSpec *pipelinespec.TaskInputsSpec_InputParameterSpec + ParameterIterator *pipelinespec.ParameterIteratorSpec +} + +type ArtifactMetadata struct { + Key string + // InputArtifactSpec is mutually exclusive with ArtifactIterator + InputArtifactSpec *pipelinespec.TaskInputsSpec_InputArtifactSpec + ArtifactIterator *pipelinespec.ArtifactIteratorSpec + ArtifactIO *apiV2beta1.PipelineTaskDetail_InputOutputs_IOArtifact +} + +// InputMetadata collects artifacts and parameters as arrays because +// the "key" is not unique in the case of Iterator parameters. 
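+//
+// For example (an illustrative sketch), a parameter iterator over three items
+// produces three ParameterMetadata entries that share the same Key and are
+// distinguished only by ParameterIO.Producer.Iteration (0, 1, and 2).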
+type InputMetadata struct { + Parameters []ParameterMetadata + Artifacts []ArtifactMetadata +} + +func ResolveInputs(ctx context.Context, opts common.Options) (*InputMetadata, *int, error) { + inputMetadata := &InputMetadata{ + Parameters: []ParameterMetadata{}, + Artifacts: []ArtifactMetadata{}, + } + + // Handle parameters + resolvedParameters, err := resolveParameters(opts) + if err != nil { + return nil, nil, err + } + inputMetadata.Parameters = resolvedParameters + + // Handle Artifacts + resolvedArtifacts, err := resolveArtifacts(opts) + if err != nil { + return nil, nil, err + } + inputMetadata.Artifacts = resolvedArtifacts + + // Note that we can only have one of the two. + var iterationCount *int + artifactIterator := opts.Task.GetArtifactIterator() + parameterIterator := opts.Task.GetParameterIterator() + switch { + case parameterIterator != nil && artifactIterator != nil: + return nil, nil, errors.New("cannot have both parameter and artifact iterators") + case parameterIterator != nil: + pm, count, err := resolveParameterIterator(opts, inputMetadata.Parameters) + if err != nil { + return nil, nil, err + } + if len(pm) == 0 { + return nil, nil, fmt.Errorf("parameter iterator is empty") + } + iterationCount = count + inputMetadata.Parameters = append(inputMetadata.Parameters, pm...) + case artifactIterator != nil: + am, count, err := resolveArtifactIterator(opts, inputMetadata.Artifacts) + if err != nil { + return nil, nil, err + } + if len(am) == 0 { + return nil, nil, fmt.Errorf("artifact iterator is empty") + } + iterationCount = count + inputMetadata.Artifacts = append(inputMetadata.Artifacts, am...) + } + return inputMetadata, iterationCount, nil +} diff --git a/backend/src/v2/driver/resolver/util.go b/backend/src/v2/driver/resolver/util.go new file mode 100644 index 00000000000..669970e2d16 --- /dev/null +++ b/backend/src/v2/driver/resolver/util.go @@ -0,0 +1,45 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package resolver + +import ( + "fmt" + + apiV2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" +) + +func findParameterByIOKey( + key string, + pms []ParameterMetadata, +) (*apiV2beta1.PipelineTaskDetail_InputOutputs_IOParameter, error) { + for _, pm := range pms { + if pm.ParameterIO.GetParameterKey() == key { + return pm.ParameterIO, nil + } + } + return nil, fmt.Errorf("parameter not found") +} + +func findArtifactByIOKey( + key string, + ams []ArtifactMetadata, +) (*apiV2beta1.PipelineTaskDetail_InputOutputs_IOArtifact, error) { + for _, am := range ams { + if am.Key == key { + return am.ArtifactIO, nil + } + } + return nil, fmt.Errorf("artifact not found") +} diff --git a/backend/src/v2/driver/root_dag.go b/backend/src/v2/driver/root_dag.go index c017fcd7bbf..b28441908ca 100644 --- a/backend/src/v2/driver/root_dag.go +++ b/backend/src/v2/driver/root_dag.go @@ -1,17 +1,3 @@ -// Copyright 2025 The Kubeflow Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - package driver import ( @@ -20,118 +6,77 @@ import ( "fmt" "github.com/golang/glog" - "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" - "github.com/kubeflow/pipelines/backend/src/common/util" - "github.com/kubeflow/pipelines/backend/src/v2/config" - "github.com/kubeflow/pipelines/backend/src/v2/metadata" - "github.com/kubeflow/pipelines/backend/src/v2/objectstore" - "k8s.io/client-go/kubernetes" + apiV2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" + "github.com/kubeflow/pipelines/backend/src/v2/client_manager" + "github.com/kubeflow/pipelines/backend/src/v2/driver/common" + "google.golang.org/protobuf/types/known/timestamppb" ) -func validateRootDAG(opts Options) (err error) { - defer func() { - if err != nil { - err = fmt.Errorf("invalid root DAG driver args: %w", err) - } - }() - if opts.PipelineName == "" { - return fmt.Errorf("pipeline name is required") - } - if opts.RunID == "" { - return fmt.Errorf("KFP run ID is required") - } - if opts.Component == nil { - return fmt.Errorf("component spec is required") - } - if opts.RuntimeConfig == nil { - return fmt.Errorf("runtime config is required") - } - if opts.Namespace == "" { - return fmt.Errorf("namespace is required") - } - if opts.Task.GetTaskInfo().GetName() != "" { - return fmt.Errorf("task spec is unnecessary") - } - if opts.DAGExecutionID != 0 { - return fmt.Errorf("DAG execution ID is unnecessary") - } - if opts.Container != nil { - return fmt.Errorf("container spec is unnecessary") - } - if opts.IterationIndex >= 0 { - return fmt.Errorf("iteration index is unnecessary") - } - return nil -} - -func RootDAG(ctx context.Context, opts Options, mlmd *metadata.Client) (execution *Execution, err error) { +// RootDAG handles initial root dag task creation +// and runtime parameter resolution. 
+func RootDAG(ctx context.Context, opts common.Options, clientManager client_manager.ClientManagerInterface) (execution *Execution, err error) { defer func() { if err != nil { - err = fmt.Errorf("driver.RootDAG(%s) failed: %w", opts.info(), err) + err = fmt.Errorf("driver.RootDAG(%s) failed: %w", opts.Info(), err) } }() - b, _ := json.Marshal(opts) - glog.V(4).Info("RootDAG opts: ", string(b)) - err = validateRootDAG(opts) - if err != nil { - return nil, err - } - // TODO(v2): in pipeline spec, rename GCS output directory to pipeline root. - pipelineRoot := opts.RuntimeConfig.GetGcsOutputDirectory() - - restConfig, err := util.GetKubernetesConfig() - if err != nil { - return nil, fmt.Errorf("failed to initialize kubernetes client: %w", err) - } - k8sClient, err := kubernetes.NewForConfig(restConfig) - if err != nil { - return nil, fmt.Errorf("failed to initialize kubernetes client set: %w", err) - } - cfg, err := config.FromConfigMap(ctx, k8sClient, opts.Namespace) - if err != nil { - return nil, err - } - storeSessionInfo := objectstore.SessionInfo{} - if pipelineRoot != "" { - glog.Infof("PipelineRoot=%q", pipelineRoot) - } else { - pipelineRoot = cfg.DefaultPipelineRoot() - glog.Infof("PipelineRoot=%q from default config", pipelineRoot) - } - storeSessionInfo, err = cfg.GetStoreSessionInfo(pipelineRoot) + b, err := json.Marshal(opts) if err != nil { return nil, err } - storeSessionInfoJSON, err := json.Marshal(storeSessionInfo) - if err != nil { + glog.V(4).Info("RootDAG opts: ", string(b)) + if err = validateRootDAG(opts); err != nil { return nil, err } - storeSessionInfoStr := string(storeSessionInfoJSON) - // TODO(Bobgy): fill in run resource. - pipeline, err := mlmd.GetPipeline(ctx, opts.PipelineName, opts.RunID, opts.Namespace, "run-resource", pipelineRoot, storeSessionInfoStr) - if err != nil { - return nil, err + if clientManager == nil { + return nil, fmt.Errorf("api client is nil") } - executorInput := &pipelinespec.ExecutorInput{ - Inputs: &pipelinespec.ExecutorInput_Inputs{ - ParameterValues: opts.RuntimeConfig.GetParameterValues(), + // Build minimal PipelineTaskDetail for root DAG task under the run. + // Inputs: pass runtime parameters into task inputs for record. 
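+	// As an illustrative sketch (the parameter name "lr" is an assumption, not
+	// from this codebase): a runtime config entry {"lr": "0.01"} is recorded as
+	// an IOParameter with ParameterKey "lr", Type RUNTIME_VALUE_INPUT, and
+	// Producer.TaskName "ROOT" on the root task's inputs.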
+ var inputs *apiV2beta1.PipelineTaskDetail_InputOutputs + if opts.RuntimeConfig != nil && opts.RuntimeConfig.GetParameterValues() != nil { + params := make([]*apiV2beta1.PipelineTaskDetail_InputOutputs_IOParameter, 0, len(opts.RuntimeConfig.GetParameterValues())) + for name, val := range opts.RuntimeConfig.GetParameterValues() { + n := name + params = append(params, &apiV2beta1.PipelineTaskDetail_InputOutputs_IOParameter{ + ParameterKey: n, + Value: val, + Type: apiV2beta1.IOType_RUNTIME_VALUE_INPUT, + Producer: &apiV2beta1.IOProducer{ + TaskName: "ROOT", + }, + }) + } + inputs = &apiV2beta1.PipelineTaskDetail_InputOutputs{Parameters: params} + } + scopePath := opts.ScopePath.StringPath() + pd := &apiV2beta1.PipelineTaskDetail{ + Name: "ROOT", + DisplayName: opts.RunDisplayName, + RunId: opts.Run.GetRunId(), + Type: apiV2beta1.PipelineTaskDetail_ROOT, + Inputs: inputs, + TypeAttributes: &apiV2beta1.PipelineTaskDetail_TypeAttributes{}, + State: apiV2beta1.PipelineTaskDetail_RUNNING, + ScopePath: scopePath, + StartTime: timestamppb.Now(), + CreateTime: timestamppb.Now(), + Pods: []*apiV2beta1.PipelineTaskDetail_TaskPod{ + { + Name: opts.PodName, + Uid: opts.PodUID, + Type: apiV2beta1.PipelineTaskDetail_DRIVER, + }, }, } - // TODO(Bobgy): validate executorInput matches component spec types - ecfg, err := metadata.GenerateExecutionConfig(executorInput) + task, err := clientManager.KFPAPIClient().CreateTask(ctx, &apiV2beta1.CreateTaskRequest{Task: pd}) if err != nil { return nil, err } - ecfg.ExecutionType = metadata.DagExecutionTypeName - ecfg.Name = fmt.Sprintf("run/%s", opts.RunID) - exec, err := mlmd.CreateExecution(ctx, pipeline, ecfg) - if err != nil { - return nil, err + execution = &Execution{ + TaskID: task.TaskId, } - glog.Infof("Created execution: %s", exec) - // No need to return ExecutorInput, because tasks in the DAG will resolve - // needed info from MLMD. - return &Execution{ID: exec.GetID()}, nil + return execution, nil } diff --git a/backend/src/v2/driver/setup_test.go b/backend/src/v2/driver/setup_test.go new file mode 100644 index 00000000000..c066abc5dfb --- /dev/null +++ b/backend/src/v2/driver/setup_test.go @@ -0,0 +1,922 @@ +// Copyright 2025 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+
+package driver
+
+import (
+	"context"
+	"fmt"
+	"testing"
+	"time"
+
+	"github.com/google/uuid"
+	"github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec"
+	apiv2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client"
+	"github.com/kubeflow/pipelines/backend/src/apiserver/config/proxy"
+	"github.com/kubeflow/pipelines/backend/src/common/util"
+	"github.com/kubeflow/pipelines/backend/src/v2/apiclient/kfpapi"
+	clientmanager "github.com/kubeflow/pipelines/backend/src/v2/client_manager"
+	"github.com/kubeflow/pipelines/backend/src/v2/component"
+	"github.com/kubeflow/pipelines/backend/src/v2/driver/common"
+	"github.com/kubeflow/pipelines/kubernetes_platform/go/kubernetesplatform"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+	"google.golang.org/protobuf/encoding/protojson"
+	"google.golang.org/protobuf/types/known/structpb"
+	"k8s.io/client-go/kubernetes/fake"
+)
+
+const TestPipelineName = "test-pipeline"
+const TestNamespace = "test-namespace"
+
+type TestContext struct {
+	Run *apiv2beta1.Run
+	util.ScopePath
+	T             *testing.T
+	PipelineSpec  *pipelinespec.PipelineSpec
+	RootTask      *apiv2beta1.PipelineTaskDetail
+	PlatformSpec  *pipelinespec.PlatformSpec
+	ClientManager clientmanager.ClientManagerInterface
+	MockAPI       *kfpapi.MockAPI
+	MockObjStore  *component.MockObjectStoreClient // Shared across all launchers in this test context
+}
+
+// NewTestContextWithRootExecuted creates a new test context with basic configuration.
+// It automatically launches a root DAG using the provided input
+// and updates the scope path.
+// When using a TestContext, note the following:
+// - Output parameters, artifacts, and artifact-tasks are not auto-created
+// and must be created manually using mock launcher calls. This
+// includes outputs for upstream dags, since we expect launcher->api server
+// to handle these.
+// - All other input resolutions are handled automatically.
+// - ScopePath is automatically pushed/popped for container calls.
+// - ScopePath is automatically updated for dags upon entering a dag (push),
+// but not upon exiting the dag (pop); this should be handled by the caller
+// via tc.ExitDag().
+// - When updating the TestContext, ensure that for any driver execution the Run
+// object is refreshed using RefreshRun(); otherwise you might use stale data.
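+//
+// A minimal usage sketch (the pipeline path and dag task name here are
+// illustrative assumptions):
+//
+//	tc := NewTestContextWithRootExecuted(t, &pipelinespec.PipelineJob_RuntimeConfig{}, "test_data/taskOutputArtifact_test.yaml")
+//	_, dagTask := tc.RunDagDriver("my-dag", tc.RootTask)
+//	// ... assert against dagTask, then pop the dag scope when done:
+//	tc.ExitDag()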
+func NewTestContextWithRootExecuted(t *testing.T, runtimeConfig *pipelinespec.PipelineJob_RuntimeConfig, pipelinePath string) *TestContext { + t.Helper() + proxy.InitializeConfigWithEmptyForTests() + mockAPI := kfpapi.NewMockAPI() + tc := &TestContext{ + ClientManager: clientmanager.NewFakeClientManager(fake.NewClientset(), mockAPI), + MockAPI: mockAPI, + MockObjStore: component.NewMockObjectStoreClient(), // Shared object store + } + + // Load pipeline spec + pipelineSpec, platformSpec, err := util.LoadPipelineAndPlatformSpec(pipelinePath) + require.NoError(t, err) + require.NotNil(t, platformSpec) + require.NotNil(t, pipelineSpec) + + // Convert pipelineSpec to structpb.Struct + pipelineSpecJSON, err := protojson.Marshal(pipelineSpec) + require.NoError(t, err) + pipelineSpecStruct := &structpb.Struct{} + err = protojson.Unmarshal(pipelineSpecJSON, pipelineSpecStruct) + require.NoError(t, err) + + // Set up scope path + tc.ScopePath, err = util.NewScopePathFromStruct(pipelineSpecStruct) + require.NoError(t, err) + tc.PipelineSpec = pipelineSpec + tc.PlatformSpec = platformSpec + + // Create a test run + run := tc.CreateTestRun(t, "test-pipeline") + require.NotNil(t, run) + + tc.Run = run + tc.T = t + + // Create a root DAG execution using basic inputs + _, rootTask := tc.RunRootDag(tc, run, runtimeConfig) + tc.RootTask = rootTask + return tc +} + +// CreateTestRun creates a test run with basic configuration +func (tc *TestContext) CreateTestRun(t *testing.T, pipelineName string) *apiv2beta1.Run { + t.Helper() + + // Convert the loaded pipeline spec to structpb.Struct for the run + // Marshal to JSON and then unmarshal into structpb + pipelineSpecJSON, err := protojson.Marshal(tc.PipelineSpec) + require.NoError(t, err) + pipelineSpecStruct := &structpb.Struct{} + err = protojson.Unmarshal(pipelineSpecJSON, pipelineSpecStruct) + require.NoError(t, err) + + uuid, _ := uuid.NewRandom() + run := &apiv2beta1.Run{ + RunId: uuid.String(), + DisplayName: fmt.Sprintf("test-run-%s-%d", pipelineName, time.Now().Unix()), + PipelineSource: &apiv2beta1.Run_PipelineSpec{PipelineSpec: pipelineSpecStruct}, + RuntimeConfig: &apiv2beta1.RuntimeConfig{}, + State: apiv2beta1.RuntimeState_RUNNING, + } + + tc.MockAPI.AddRun(run) + return run +} + +// CreateTestTask creates a test task with the given configuration +func (tc *TestContext) CreateTestTask( + t *testing.T, + runID, + taskName string, + taskType apiv2beta1.PipelineTaskDetail_TaskType, + inputParams, outputParams []*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter, +) *apiv2beta1.PipelineTaskDetail { + t.Helper() + + podUUID, _ := uuid.NewRandom() + task := &apiv2beta1.PipelineTaskDetail{ + Name: taskName, + DisplayName: taskName, + RunId: runID, + Type: taskType, + State: apiv2beta1.PipelineTaskDetail_RUNNING, + Pods: []*apiv2beta1.PipelineTaskDetail_TaskPod{ + { + Name: fmt.Sprintf("%s-pod", taskName), + Uid: podUUID.String(), + Type: apiv2beta1.PipelineTaskDetail_DRIVER, + }, + }, + Inputs: &apiv2beta1.PipelineTaskDetail_InputOutputs{ + Parameters: inputParams, + }, + Outputs: &apiv2beta1.PipelineTaskDetail_InputOutputs{ + Parameters: outputParams, + }, + } + + createdTask, err := tc.ClientManager.KFPAPIClient().CreateTask(context.Background(), &apiv2beta1.CreateTaskRequest{ + Task: task, + }) + require.NoError(t, err) + return createdTask +} + +// CreateTestArtifact creates a test artifact with the given configuration +func (tc *TestContext) CreateTestArtifact(t *testing.T, name, artifactType string) *apiv2beta1.Artifact { + t.Helper() + + 
artifact := &apiv2beta1.Artifact{ + Name: name, + Type: apiv2beta1.Artifact_Dataset, // Default type + } + + // Set specific type if provided + switch artifactType { + case "model": + artifact.Type = apiv2beta1.Artifact_Model + case "metric": + artifact.Type = apiv2beta1.Artifact_Metric + } + + createdArtifact, err := tc.ClientManager.KFPAPIClient().CreateArtifact(context.Background(), &apiv2beta1.CreateArtifactRequest{ + Artifact: artifact, + }) + require.NoError(t, err) + return createdArtifact +} + +// CreateTestArtifactTask creates an artifact-task relationship +func (tc *TestContext) CreateTestArtifactTask(t *testing.T, artifactID, taskID, runID, key string, + producer *apiv2beta1.IOProducer, artifactType apiv2beta1.IOType) *apiv2beta1.ArtifactTask { + t.Helper() + + artifactTask := &apiv2beta1.ArtifactTask{ + ArtifactId: artifactID, + TaskId: taskID, + RunId: runID, + Type: artifactType, + Producer: producer, + Key: key, + } + + createdArtifactTask, err := tc.ClientManager.KFPAPIClient().CreateArtifactTask(context.Background(), &apiv2beta1.CreateArtifactTaskRequest{ + ArtifactTask: artifactTask, + }) + require.NoError(t, err) + return createdArtifactTask +} + +// CreateParameter creates a test parameter with the given name and value +func CreateParameter(value, key string, + producer *apiv2beta1.IOProducer) *apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter { + val, _ := structpb.NewValue(value) + param := &apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{ + Value: val, + ParameterKey: key, + Producer: producer, + } + return param +} + +// Example test demonstrating the usage including artifact population +func TestTestContext(t *testing.T) { + // Setup test environment + testSetup := NewTestContextWithRootExecuted(t, &pipelinespec.PipelineJob_RuntimeConfig{}, "test_data/taskOutputArtifact_test.yaml") + require.NotNil(t, testSetup) + assert.NotEmpty(t, testSetup.Run.RunId) + + // Create a test run + run := testSetup.CreateTestRun(t, "test-pipeline") + assert.NotNil(t, run) + assert.NotEmpty(t, run.RunId) + assert.Equal(t, "primary-pipeline", run.GetPipelineSpec().Fields["pipelineInfo"].GetStructValue().Fields["name"].GetStringValue()) + + // Create test tasks + task1 := testSetup.CreateTestTask(t, + run.RunId, + "producer-task", + apiv2beta1.PipelineTaskDetail_RUNTIME, + []*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{ + CreateParameter( + "input1", + "pipelinechannel--args-generator-op-Output", + &apiv2beta1.IOProducer{TaskName: "some-task"}, + ), + }, + []*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{ + CreateParameter( + "output1", + "msg", + nil, + ), + CreateParameter( + "output2", + "", + nil, + ), + }) + task2 := testSetup.CreateTestTask(t, run.RunId, "consumer-task", apiv2beta1.PipelineTaskDetail_RUNTIME, + []*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{ + CreateParameter( + "input4", + "input4key", + nil, + ), + }, + []*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{ + CreateParameter( + "output3", + "pipelinechannel--split-ids-Output", + nil, + ), + }) + + // Create test artifacts + artifact1 := testSetup.CreateTestArtifact(t, "output-data", "dataset") + artifact2 := testSetup.CreateTestArtifact(t, "trained-model", "model") + + // Create artifact-task relationships + // task1 produces artifact1 (output) + testSetup.CreateTestArtifactTask(t, + artifact1.ArtifactId, task1.TaskId, run.RunId, "pipelinechannel--loop_parameter-loop-item-1", + &apiv2beta1.IOProducer{ + TaskName: task1.Name, + }, + apiv2beta1.IOType_OUTPUT, + ) + + 
// task2 consumes artifact1 (input) + testSetup.CreateTestArtifactTask(t, + artifact1.ArtifactId, task2.TaskId, run.RunId, "pipelinechannel--loop_parameter-loop-item-2", + &apiv2beta1.IOProducer{ + TaskName: task1.Name, + }, + apiv2beta1.IOType_COMPONENT_INPUT, + ) + // task2 produces artifact2 (output) + testSetup.CreateTestArtifactTask(t, + artifact2.ArtifactId, task2.TaskId, run.RunId, "pipelinechannel--loop_parameter-loop-item", + &apiv2beta1.IOProducer{ + TaskName: task2.Name, + }, + apiv2beta1.IOType_OUTPUT, + ) + + // Test getting run with populated tasks and artifacts + fullView := apiv2beta1.GetRunRequest_FULL + populatedRun, err := testSetup.ClientManager.KFPAPIClient().GetRun(context.Background(), &apiv2beta1.GetRunRequest{RunId: run.RunId, View: &fullView}) + require.NoError(t, err) + assert.NotNil(t, populatedRun) + assert.Len(t, populatedRun.Tasks, 2) + + // Verify task1 has correct artifacts (1 output) + var producerTask *apiv2beta1.PipelineTaskDetail + for _, task := range populatedRun.Tasks { + if task.Name == "producer-task" { + producerTask = task + break + } + } + require.NotNil(t, producerTask) + assert.Len(t, producerTask.Inputs.Artifacts, 0) // No input artifacts + assert.Len(t, producerTask.Outputs.Artifacts, 1) // 1 output artifact + + // Verify task2 has correct artifacts (1 input, 1 output) + var consumerTask *apiv2beta1.PipelineTaskDetail + for _, task := range populatedRun.Tasks { + if task.Name == "consumer-task" { + consumerTask = task + break + } + } + require.NotNil(t, consumerTask) + assert.Len(t, consumerTask.Inputs.Artifacts, 1) // 1 input artifact + assert.Len(t, consumerTask.Outputs.Artifacts, 1) // 1 output artifact + + // Verify producer information is correctly set + inputArtifact := consumerTask.Inputs.Artifacts[0] + assert.Equal(t, "producer-task", inputArtifact.GetProducer().TaskName) + assert.Equal(t, "pipelinechannel--loop_parameter-loop-item-2", inputArtifact.GetArtifactKey()) +} + +func (tc *TestContext) RunRootDag(testSetup *TestContext, run *apiv2beta1.Run, runtimeConfig *pipelinespec.PipelineJob_RuntimeConfig) (*Execution, *apiv2beta1.PipelineTaskDetail) { + tc.RefreshRun() + defer tc.RefreshRun() + err := tc.Push("root") + require.NoError(tc.T, err) + + opts := common.Options{ + PipelineName: TestPipelineName, + Run: run, + Component: tc.ScopePath.GetLast().GetComponentSpec(), + ParentTask: nil, + IterationIndex: -1, + RuntimeConfig: runtimeConfig, + Namespace: TestNamespace, + Task: nil, + Container: nil, + KubernetesExecutorConfig: &kubernetesplatform.KubernetesExecutorConfig{}, + PipelineLogLevel: "1", + PublishLogs: "false", + CacheDisabled: false, + DriverType: "ROOT_DAG", + TaskName: "", // Empty for root driver + PodName: "system-dag-driver", + PodUID: "some-uid", + ScopePath: tc.ScopePath, + } + // Execute RootDAG + execution, err := RootDAG(context.Background(), opts, testSetup.ClientManager) + require.NoError(tc.T, err) + require.NotNil(tc.T, execution) + + task, err := tc.ClientManager.KFPAPIClient().GetTask(context.Background(), &apiv2beta1.GetTaskRequest{TaskId: execution.TaskID}) + require.NoError(tc.T, err) + require.NotNil(tc.T, task) + require.Equal(tc.T, execution.TaskID, task.TaskId) + + return execution, task +} + +func (tc *TestContext) RunDagDriver( + taskName string, + parentTask *apiv2beta1.PipelineTaskDetail) (*Execution, *apiv2beta1.PipelineTaskDetail) { + t := tc.T + tc.RefreshRun() + defer tc.RefreshRun() + + err := tc.Push(taskName) + require.NoError(t, err) + taskSpec := tc.GetLast().GetTaskSpec() + + opts := 
tc.setupDagOptions(parentTask, taskSpec, nil) + + execution, err := DAG(context.Background(), opts, tc.ClientManager) + require.NoError(t, err) + require.NotNil(t, execution) + + task, err := tc.ClientManager.KFPAPIClient().GetTask(context.Background(), &apiv2beta1.GetTaskRequest{TaskId: execution.TaskID}) + require.NoError(t, err) + require.NotNil(t, task) + require.Equal(t, execution.TaskID, task.TaskId) + require.Equal(t, taskName, task.GetName()) + + return execution, task +} + +// RunContainerDriver runs a container for the given task. +// If autoUpdateScope is true, the scope path will +// be popped after the container is completed. +func (tc *TestContext) RunContainerDriver( + taskName string, + parentTask *apiv2beta1.PipelineTaskDetail, + iterationIndex *int64, + autoUpdateScope bool, +) (*Execution, *apiv2beta1.PipelineTaskDetail) { + tc.RefreshRun() + defer tc.RefreshRun() + + // Add scope path and pop it once done + err := tc.Push(taskName) + + if autoUpdateScope { + defer func() { + _, ok := tc.Pop() + require.True(tc.T, ok) + }() + } + + require.NoError(tc.T, err) + taskSpec := tc.GetLast().GetTaskSpec() + + kubernetesExecutorConfig, err := util.LoadKubernetesExecutorConfig(tc.GetLast().GetComponentSpec(), tc.PlatformSpec) + require.NoError(tc.T, err) + opts := tc.setupContainerOptions(parentTask, taskSpec, kubernetesExecutorConfig) + + if iterationIndex != nil { + opts.IterationIndex = int(*iterationIndex) + } + + execution, err := Container(context.Background(), opts, tc.ClientManager) + require.NoError(tc.T, err) + require.NotNil(tc.T, execution) + + task, err := tc.ClientManager.KFPAPIClient().GetTask(context.Background(), &apiv2beta1.GetTaskRequest{TaskId: execution.TaskID}) + require.NoError(tc.T, err) + require.NotNil(tc.T, task) + require.Equal(tc.T, execution.TaskID, task.TaskId) + require.Equal(tc.T, taskName, task.GetName()) + if task.State != apiv2beta1.PipelineTaskDetail_CACHED { + // In the case of k8s ops like createpvc/deletepvc + // There are no launchers, so we mark them success + // within driver. 
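+			// Otherwise the task is expected to still be RUNNING (the launcher
+			// marks completion later) or already SUCCEEDED; the assertion below
+			// accepts both states.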
+			require.True(
+				tc.T,
+				task.State == apiv2beta1.PipelineTaskDetail_RUNNING ||
+					task.State == apiv2beta1.PipelineTaskDetail_SUCCEEDED,
+				"expected task.State to be RUNNING or SUCCEEDED, got %v",
+				task.State,
+			)
+	}
+
+	return execution, task
+}
+
+func (tc *TestContext) RefreshRun() {
+	t := tc.T
+	fullView := apiv2beta1.GetRunRequest_FULL
+	run, err := tc.ClientManager.KFPAPIClient().GetRun(context.Background(), &apiv2beta1.GetRunRequest{RunId: tc.Run.RunId, View: &fullView})
+	require.NoError(t, err)
+	tc.Run = run
+}
+
+func (tc *TestContext) ExitDag() {
+	_, ok := tc.Pop()
+	require.True(tc.T, ok)
+}
+
+func (tc *TestContext) MockLauncherOutputParameterCreate(
+	taskID string,
+	parameterKey string,
+	value *structpb.Value,
+	outputType apiv2beta1.IOType,
+	producerTask string,
+	producerIteration *int64,
+) {
+	// Get Task
+	task, err := tc.ClientManager.KFPAPIClient().GetTask(context.Background(), &apiv2beta1.GetTaskRequest{TaskId: taskID})
+	require.NoError(tc.T, err)
+	require.NotNil(tc.T, task)
+
+	newParameter := &apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{
+		Value:        value,
+		Type:         outputType,
+		ParameterKey: parameterKey,
+		Producer: &apiv2beta1.IOProducer{
+			TaskName: producerTask,
+		},
+	}
+	if producerIteration != nil {
+		newParameter.Producer.Iteration = producerIteration
+	}
+	parameters := task.Outputs.Parameters
+	parameters = append(parameters, newParameter)
+	task.Outputs.Parameters = parameters
+	// Update Task via kfpAPI UpdateTask
+	task, err = tc.ClientManager.KFPAPIClient().UpdateTask(context.Background(), &apiv2beta1.UpdateTaskRequest{
+		TaskId: taskID,
+		Task:   task,
+	})
+	require.NoError(tc.T, err)
+	require.NotNil(tc.T, task)
+
+	tc.RefreshRun()
+}
+
+// MockLauncherDefaultInputParametersUpdate updates a runtime task's inputs
+// with component default values when no upstream input was provided.
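+// Only parameters whose component spec declares a default value are injected;
+// optional parameters without defaults are skipped, and a missing non-optional
+// parameter fails the test. This loosely mirrors how the launcher applies
+// component defaults at runtime.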
+func (tc *TestContext) MockLauncherDefaultInputParametersUpdate(taskID string, componentSpec *pipelinespec.ComponentSpec) *apiv2beta1.PipelineTaskDetail { + defer func() { tc.RefreshRun() }() + + // Get Task + task, err := tc.ClientManager.KFPAPIClient().GetTask(context.Background(), &apiv2beta1.GetTaskRequest{TaskId: taskID}) + require.NoError(tc.T, err) + require.NotNil(tc.T, task) + + taskInputParameters := task.GetInputs().GetParameters() + + // Find all optional parameters that have default values and add them to the task input parameters + for key, inputParamSpec := range componentSpec.GetInputDefinitions().GetParameters() { + if !parameterExistsWithKey(taskInputParameters, key) { + var value *structpb.Value + if inputParamSpec.GetDefaultValue() != nil { + value = inputParamSpec.GetDefaultValue() + } else { + require.True(tc.T, inputParamSpec.IsOptional, "Parameter %s is not optional", key) + continue + } + require.NotNil(tc.T, value) + parameterIO := &apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter{ + Value: value, + Type: apiv2beta1.IOType_COMPONENT_DEFAULT_INPUT, + ParameterKey: key, + } + taskInputParameters = append(taskInputParameters, parameterIO) + } + } + task.Inputs.Parameters = taskInputParameters + task, err = tc.ClientManager.KFPAPIClient().UpdateTask(context.Background(), &apiv2beta1.UpdateTaskRequest{ + TaskId: taskID, + Task: task, + }) + require.NoError(tc.T, err) + require.NotNil(tc.T, task) + return task +} + +func parameterExistsWithKey(parameters []*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter, key string) bool { + for _, parameter := range parameters { + if parameter.ParameterKey == key { + return true + } + } + return false +} + +func (tc *TestContext) MockLauncherOutputArtifactCreate( + taskID string, + artifactKey string, + artifactType apiv2beta1.Artifact_ArtifactType, + outputType apiv2beta1.IOType, + producerTask string, + producerIteration *int64, +) string { + t := tc.T + artifactID, _ := uuid.NewRandom() + outputArtifact := &apiv2beta1.Artifact{ + ArtifactId: artifactID.String(), + Name: artifactKey, + Type: artifactType, + Uri: util.StringPointer(fmt.Sprintf("s3://some.location/%s", artifactKey)), + Namespace: TestNamespace, + Metadata: map[string]*structpb.Value{ + "display_name": structpb.NewStringValue(artifactKey), + "task_id": structpb.NewStringValue(taskID), + "producer_task": structpb.NewStringValue(producerTask), + }, + } + createArtifact, err := tc.ClientManager.KFPAPIClient().CreateArtifact( + context.Background(), + &apiv2beta1.CreateArtifactRequest{ + Artifact: outputArtifact, + }) + require.NoError(t, err) + require.NotNil(t, createArtifact) + + artifactTask := &apiv2beta1.ArtifactTask{ + ArtifactId: artifactID.String(), + TaskId: taskID, + RunId: tc.Run.GetRunId(), + Key: artifactKey, + Producer: &apiv2beta1.IOProducer{TaskName: producerTask}, + Type: outputType, + } + if producerIteration != nil { + artifactTask.Producer.Iteration = producerIteration + } + at, err := tc.ClientManager.KFPAPIClient().CreateArtifactTask( + context.Background(), + &apiv2beta1.CreateArtifactTaskRequest{ + ArtifactTask: artifactTask, + }) + require.NoError(t, err) + require.NotNil(t, at) + tc.RefreshRun() + return artifactID.String() +} + +func (tc *TestContext) MockLauncherArtifactTaskCreate( + producerTaskName, taskID, key string, + artifactID string, producerIteration *int64, + outputType apiv2beta1.IOType) { + t := tc.T + at := &apiv2beta1.ArtifactTask{ + ArtifactId: artifactID, + TaskId: taskID, + RunId: tc.Run.GetRunId(), + Key: key, + 
Type: outputType, + Producer: &apiv2beta1.IOProducer{TaskName: producerTaskName}, + } + if producerIteration != nil { + at.Producer.Iteration = producerIteration + } + result, err := tc.ClientManager.KFPAPIClient().CreateArtifactTask( + context.Background(), + &apiv2beta1.CreateArtifactTaskRequest{ArtifactTask: at}) + require.NoError(t, err) + require.NotNil(t, result) + tc.RefreshRun() +} + +// LauncherExecution holds the result of a launcher execution +type LauncherExecution struct { + // The launcher instance (with mocks still attached) + Launcher *component.LauncherV2 + // Mock instances for verification + MockFS *component.MockFileSystem + MockCmd *component.MockCommandExecutor + MockObjStore *component.MockObjectStoreClient + // The task that was executed + Task *apiv2beta1.PipelineTaskDetail +} + +// RunLauncher executes a launcher for the given execution with mocked dependencies. +// This simulates what the launcher would do when executing user code. +// It uses the ExecutorInput that was already prepared by the driver. +// +// Usage: +// +// execution, _ := tc.RunContainerDriver("task-name", parentTask, nil, true) +// launcherExec := tc.RunLauncher(execution, map[string][]byte{ +// "/tmp/outputs/metric": []byte("0.95"), +// }) +// +// // Verify command was executed +// assert.Equal(t, 1, launcherExec.MockCmd.CallCount()) +// +// // Verify artifacts were uploaded +// uploads := launcherExec.MockObjStore.GetUploadCallsForKey("model") +// assert.Len(t, uploads, 1) +func (tc *TestContext) RunLauncher(execution *Execution, outputFiles map[string][]byte, autoUpdateScope bool) *LauncherExecution { + t := tc.T + ctx := context.Background() + + // Get the task that was created by the driver + task, err := tc.ClientManager.KFPAPIClient().GetTask(ctx, &apiv2beta1.GetTaskRequest{TaskId: execution.TaskID}) + require.NoError(t, err) + require.NotNil(t, task) + + // Use the ExecutorInput that was already prepared by the driver + executorInput := execution.ExecutorInput + require.NotNil(t, executorInput, "ExecutorInput should be set by driver") + + // Get componentSpec and taskSpec from the current scope path + // The TestContext's ScopePath should already be at the right location after RunContainerDriver + componentSpec := tc.GetLast().GetComponentSpec() + taskSpec := tc.GetLast().GetTaskSpec() + require.NotNil(t, componentSpec, "Component spec not found") + require.NotNil(t, taskSpec, "Task spec not found") + + // Marshal executor input + executorInputJSON, err := protojson.Marshal(executorInput) + require.NoError(t, err) + + // Create launcher options + var iterPtr *int64 + var parentTaskForLauncher *apiv2beta1.PipelineTaskDetail + if task.ParentTaskId != nil && *task.ParentTaskId != "" { + // Get the parent task + parentTask, err := tc.ClientManager.KFPAPIClient().GetTask(ctx, &apiv2beta1.GetTaskRequest{TaskId: *task.ParentTaskId}) + if err == nil { + parentTaskForLauncher = parentTask + // Extract iteration index from the task's type attributes if this is an iteration + if task.GetTypeAttributes() != nil && task.GetTypeAttributes().IterationIndex != nil { + iterPtr = task.GetTypeAttributes().IterationIndex + } + } + } + + // Convert PipelineSpec to structpb.Struct + pipelineSpecJSON, err := protojson.Marshal(tc.PipelineSpec) + require.NoError(t, err) + pipelineSpecStruct := &structpb.Struct{} + err = protojson.Unmarshal(pipelineSpecJSON, pipelineSpecStruct) + require.NoError(t, err) + + opts := &component.LauncherV2Options{ + Namespace: TestNamespace, + PodName: fmt.Sprintf("%s-pod", 
task.GetName()), + PodUID: uuid.New().String(), + PipelineName: TestPipelineName, + PublishLogs: "false", + ComponentSpec: componentSpec, + TaskSpec: taskSpec, + ScopePath: tc.ScopePath, + Run: tc.Run, + ParentTask: parentTaskForLauncher, + Task: task, + IterationIndex: iterPtr, + PipelineSpec: pipelineSpecStruct, + } + + // Create launcher with a dummy command (will be mocked anyway) + launcher, err := component.NewLauncherV2( + string(executorInputJSON), + []string{"python", "component.py"}, // Default command, will be mocked + opts, + tc.ClientManager, + ) + require.NoError(t, err) + + // Setup mocks + mockFS := component.NewMockFileSystem() + mockCmd := component.NewMockCommandExecutor() + // Use the shared mock object store from TestContext + mockObjStore := tc.MockObjStore + + // Configure output files + for path, content := range outputFiles { + mockFS.SetFileContent(path, content) + } + + // Set output metadata file if not provided + if _, exists := outputFiles["/tmp/kfp_outputs/output_metadata.json"]; !exists { + mockFS.SetFileContent("/tmp/kfp_outputs/output_metadata.json", []byte("{}")) + } + + // Configure command to succeed by default + mockCmd.RunError = nil + + // Pre-populate input artifacts in mock object store + for _, ioArtifact := range task.GetInputs().GetArtifacts() { + for _, artifact := range ioArtifact.Artifacts { + if artifact.GetUri() != "" { + // Simulate artifact already exists in object store + mockObjStore.SetArtifact(artifact.GetUri(), []byte("input data")) + } + } + } + + // Inject mocks (file system, command executor, object store) + // Note: KFP API client comes from clientManager which already has MockAPI + launcher.WithFileSystem(mockFS). + WithCommandExecutor(mockCmd). + WithObjectStore(mockObjStore) + + // Execute the launcher using the full Execute() method + // This will test the complete flow including: + // - Task output parameter updates + // - Task status updates to SUCCEEDED + // - Status propagation up the DAG hierarchy + err = launcher.Execute(ctx) + require.NoError(t, err, "Launcher execution failed for task %s", task.GetName()) + + require.Equal(t, 1, mockCmd.CallCount()) + + // Refresh the run to get updated task data + tc.RefreshRun() + + // Get updated task + updatedTask, err := tc.ClientManager.KFPAPIClient().GetTask(ctx, &apiv2beta1.GetTaskRequest{TaskId: execution.TaskID}) + require.NoError(t, err) + + // Pop scope if autoUpdateScope is true + if autoUpdateScope { + _, ok := tc.Pop() + require.True(t, ok, "Failed to pop scope path") + } + + return &LauncherExecution{ + Launcher: launcher, + MockFS: mockFS, + MockCmd: mockCmd, + MockObjStore: mockObjStore, + Task: updatedTask, + } +} + +func (tc *TestContext) setupDagOptions( + parentTask *apiv2beta1.PipelineTaskDetail, + taskSpec *pipelinespec.PipelineTaskSpec, + kubernetesExecutorConfig *kubernetesplatform.KubernetesExecutorConfig, +) common.Options { + componentSpec := tc.PipelineSpec.Components[taskSpec.ComponentRef.Name] + + ds := tc.PipelineSpec.GetDeploymentSpec() + platformDeploymentSpec := &pipelinespec.PlatformDeploymentConfig{} + + b, err := protojson.Marshal(ds) + require.NoError(tc.T, err) + err = protojson.Unmarshal(b, platformDeploymentSpec) + require.NoError(tc.T, err) + assert.NotNil(tc.T, platformDeploymentSpec) + + cs := platformDeploymentSpec.Executors[componentSpec.GetExecutorLabel()] + containerExecutorSpec := &pipelinespec.PipelineDeploymentConfig_ExecutorSpec{} + b, err = protojson.Marshal(cs) + require.NoError(tc.T, err) + err = protojson.Unmarshal(b, 
containerExecutorSpec) + require.NoError(tc.T, err) + assert.NotNil(tc.T, containerExecutorSpec) + + return common.Options{ + PipelineName: TestPipelineName, + Run: tc.Run, + Component: componentSpec, + ParentTask: parentTask, + IterationIndex: -1, + RuntimeConfig: nil, + Namespace: TestNamespace, + Task: taskSpec, + Container: nil, + KubernetesExecutorConfig: kubernetesExecutorConfig, + RunName: "", + RunDisplayName: "", + PipelineLogLevel: "1", + PublishLogs: "false", + CacheDisabled: false, + DriverType: "DAG", + TaskName: taskSpec.TaskInfo.GetName(), + PodName: "system-dag-driver", + PodUID: "some-uid", + ScopePath: tc.ScopePath, + } +} + +func (tc *TestContext) setupContainerOptions( + parentTask *apiv2beta1.PipelineTaskDetail, + taskSpec *pipelinespec.PipelineTaskSpec, + kubernetesExecutorConfig *kubernetesplatform.KubernetesExecutorConfig, +) common.Options { + componentSpec := tc.PipelineSpec.Components[taskSpec.ComponentRef.Name] + + ds := tc.PipelineSpec.GetDeploymentSpec() + platformDeploymentSpec := &pipelinespec.PlatformDeploymentConfig{} + + b, err := protojson.Marshal(ds) + require.NoError(tc.T, err) + err = protojson.Unmarshal(b, platformDeploymentSpec) + require.NoError(tc.T, err) + assert.NotNil(tc.T, platformDeploymentSpec) + + cs := platformDeploymentSpec.Executors[componentSpec.GetExecutorLabel()] + containerExecutorSpec := &pipelinespec.PipelineDeploymentConfig_ExecutorSpec{} + b, err = protojson.Marshal(cs) + require.NoError(tc.T, err) + err = protojson.Unmarshal(b, containerExecutorSpec) + require.NoError(tc.T, err) + assert.NotNil(tc.T, containerExecutorSpec) + + return common.Options{ + PipelineName: TestPipelineName, + Run: tc.Run, + Component: componentSpec, + ParentTask: parentTask, + IterationIndex: -1, + RuntimeConfig: nil, + Namespace: TestNamespace, + Task: taskSpec, + Container: containerExecutorSpec.GetContainer(), + KubernetesExecutorConfig: kubernetesExecutorConfig, + PipelineLogLevel: "1", + PublishLogs: "false", + CacheDisabled: false, + DriverType: "CONTAINER", + TaskName: taskSpec.TaskInfo.GetName(), + PodName: "system-container-impl", + PodUID: "some-uid", + ScopePath: tc.ScopePath, + } +} + +func (tc *TestContext) fetchParameter(key string, params []*apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter) *apiv2beta1.PipelineTaskDetail_InputOutputs_IOParameter { + for _, p := range params { + if key == p.ParameterKey { + return p + } + } + return nil +} diff --git a/backend/src/v2/driver/test_data/cache_test.py b/backend/src/v2/driver/test_data/cache_test.py new file mode 100644 index 00000000000..3faac9b08cc --- /dev/null +++ b/backend/src/v2/driver/test_data/cache_test.py @@ -0,0 +1,51 @@ +import functools + +from kubernetes import client +import kfp +from kfp import dsl +from kfp.dsl import ( + Input, + Output, + Artifact, + Dataset, + component +) + +base_image="quay.io/opendatahub/ds-pipelines-ci-executor-image:v1.0" +dsl.component = functools.partial(dsl.component, base_image=base_image) + +@component +def create_dataset(output_dataset: Output[Dataset]): + with open(output_dataset.path, "w") as f: + f.write('hurricane') + output_dataset.metadata["category"] = 5 + output_dataset.metadata["description"] = "A simple dataset" + +@component +def process_dataset(input_dataset: Input[Dataset], output_artifact: Output[Artifact]): + with open(input_dataset.path, "r") as f: + data = f.read() + assert data == "hurricane" + with open(output_artifact.path, "w") as f: + f.write(f'very_bad') + +@component +def analyze_artifact(data_input: Input[Artifact], 
output_artifact: Output[Artifact]): + with open(data_input.path, "r") as f: + data = f.read() + assert data == "very_bad" + with open(output_artifact.path, "w") as f: + f.write(f'done_analyzing') + +@dsl.pipeline +def primary_pipeline(): + dataset_op = create_dataset() + processed = process_dataset(input_dataset=dataset_op.outputs["output_dataset"]).set_caching_options(True) + analyze_artifact(data_input=processed.outputs["output_artifact"]) + +if __name__ == '__main__': + from kfp import compiler + + compiler.Compiler().compile( + pipeline_func=primary_pipeline, + package_path=__file__.replace('.py', '.yaml')) diff --git a/backend/src/v2/driver/test_data/cache_test.yaml b/backend/src/v2/driver/test_data/cache_test.yaml new file mode 100644 index 00000000000..51f7445a14e --- /dev/null +++ b/backend/src/v2/driver/test_data/cache_test.yaml @@ -0,0 +1,176 @@ +# PIPELINE DEFINITION +# Name: primary-pipeline +components: + comp-analyze-artifact: + executorLabel: exec-analyze-artifact + inputDefinitions: + artifacts: + data_input: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + outputDefinitions: + artifacts: + output_artifact: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + comp-create-dataset: + executorLabel: exec-create-dataset + outputDefinitions: + artifacts: + output_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + comp-process-dataset: + executorLabel: exec-process-dataset + inputDefinitions: + artifacts: + input_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + outputDefinitions: + artifacts: + output_artifact: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 +deploymentSpec: + executors: + exec-analyze-artifact: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - analyze_artifact + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef analyze_artifact(data_input: Input[Artifact], output_artifact:\ + \ Output[Artifact]):\n with open(data_input.path, \"r\") as f:\n \ + \ data = f.read()\n assert data == \"very_bad\"\n with open(output_artifact.path,\ + \ \"w\") as f:\n f.write(f'done_analyzing')\n\n" + image: python:3.11 + exec-create-dataset: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - create_dataset + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef create_dataset(output_dataset: Output[Dataset]):\n with open(output_dataset.path,\ + \ \"w\") as f:\n f.write('hurricane')\n output_dataset.metadata[\"\ + category\"] = 5\n output_dataset.metadata[\"description\"] = \"A simple\ + \ dataset\"\n\n" + image: python:3.11 + exec-process-dataset: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - process_dataset + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef process_dataset(input_dataset: Input[Dataset], output_artifact:\ + \ Output[Artifact]):\n with open(input_dataset.path, \"r\") as f:\n \ + \ data = f.read()\n assert data == \"hurricane\"\n with open(output_artifact.path,\ + \ \"w\") as f:\n f.write(f'very_bad')\n\n" + image: python:3.11 +pipelineInfo: + name: primary-pipeline +root: + dag: + tasks: + analyze-artifact: + cachingOptions: {} + componentRef: + name: comp-analyze-artifact + dependentTasks: + - process-dataset + inputs: + artifacts: + data_input: + taskOutputArtifact: + outputArtifactKey: output_artifact + producerTask: process-dataset + taskInfo: + name: analyze-artifact + create-dataset: + cachingOptions: {} + componentRef: + name: comp-create-dataset + taskInfo: + name: create-dataset + process-dataset: + cachingOptions: + enableCache: true + componentRef: + name: comp-process-dataset + dependentTasks: + - create-dataset + inputs: + artifacts: + input_dataset: + taskOutputArtifact: + outputArtifactKey: output_dataset + producerTask: create-dataset + taskInfo: + name: process-dataset +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.6 diff --git a/backend/src/v2/driver/test_data/componentInput.py b/backend/src/v2/driver/test_data/componentInput.py new file mode 100644 index 00000000000..42c572f0b6e --- /dev/null +++ b/backend/src/v2/driver/test_data/componentInput.py @@ -0,0 +1,62 @@ +import functools +import os + +from kfp import dsl +from kfp.dsl import Input, Output, Dataset, Model, Artifact + +base_image="quay.io/opendatahub/ds-pipelines-ci-executor-image:v1.0" +dsl.component = functools.partial(dsl.component, base_image=base_image) + +@dsl.component +def process_inputs( + name: str, + number: int, + threshold: float, + active: bool, + a_runtime_string: str, + a_runtime_number: int, + 
a_runtime_bool: bool, + output_text: Output[Dataset] +) -> None: + with open(output_text.path, 'w') as f: + f.write(f"[{name}, {number}, {threshold}, {active}]") + + assert name == "default_name" + assert number == 42 + assert threshold == 0.5 + assert active == True + assert a_runtime_string == "foo" + assert a_runtime_number == 10 + assert a_runtime_bool == True + +@dsl.component +def analyze_inputs(input_text: Input[Dataset]): + with open(input_text.path, 'r') as f: + data = f.read() + assert data == "[default_name, 42, 0.5, True]" + +@dsl.pipeline +def primary_pipeline( + name_in: str = "default_name", + number_in: int = 42, + threshold_in: float = 0.5, + active_in: bool = True, +): + process_inputs_task = process_inputs( + name=name_in, + number=number_in, + threshold=threshold_in, + active=active_in, + a_runtime_string="foo", + a_runtime_number=10, + a_runtime_bool=True, + ) + analyze_inputs(input_text=process_inputs_task.outputs['output_text']) + + +if __name__ == '__main__': + from kfp import compiler + + compiler.Compiler().compile( + pipeline_func=primary_pipeline, + package_path=__file__.replace('.py', '.yaml')) diff --git a/backend/src/v2/driver/test_data/componentInput.yaml b/backend/src/v2/driver/test_data/componentInput.yaml new file mode 100644 index 00000000000..73cbaaf830d --- /dev/null +++ b/backend/src/v2/driver/test_data/componentInput.yaml @@ -0,0 +1,172 @@ +# PIPELINE DEFINITION +# Name: primary-pipeline +# Inputs: +# active_in: bool [Default: True] +# name_in: str [Default: 'default_name'] +# number_in: int [Default: 42.0] +# threshold_in: float [Default: 0.5] +components: + comp-analyze-inputs: + executorLabel: exec-analyze-inputs + inputDefinitions: + artifacts: + input_text: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + comp-process-inputs: + executorLabel: exec-process-inputs + inputDefinitions: + parameters: + a_runtime_bool: + parameterType: BOOLEAN + a_runtime_number: + parameterType: NUMBER_INTEGER + a_runtime_string: + parameterType: STRING + active: + parameterType: BOOLEAN + name: + parameterType: STRING + number: + parameterType: NUMBER_INTEGER + threshold: + parameterType: NUMBER_DOUBLE + outputDefinitions: + artifacts: + output_text: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 +deploymentSpec: + executors: + exec-analyze-inputs: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - analyze_inputs + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef analyze_inputs(input_text: Input[Dataset]):\n with open(input_text.path,\ + \ 'r') as f:\n data = f.read()\n assert data == \"[default_name,\ + \ 42, 0.5, True]\"\n\n" + image: quay.io/opendatahub/ds-pipelines-ci-executor-image:v1.0 + exec-process-inputs: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - process_inputs + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef process_inputs(\n name: str,\n number: int,\n \ + \ threshold: float,\n active: bool,\n a_runtime_string:\ + \ str,\n a_runtime_number: int,\n a_runtime_bool: bool,\n\ + \ output_text: Output[Dataset]\n) -> None:\n with open(output_text.path,\ + \ 'w') as f:\n f.write(f\"[{name}, {number}, {threshold}, {active}]\"\ + )\n\n assert name == \"default_name\"\n assert number == 42\n assert\ + \ threshold == 0.5\n assert active == True\n assert a_runtime_string\ + \ == \"foo\"\n assert a_runtime_number == 10\n assert a_runtime_bool\ + \ == True\n\n" + image: quay.io/opendatahub/ds-pipelines-ci-executor-image:v1.0 +pipelineInfo: + name: primary-pipeline +root: + dag: + tasks: + analyze-inputs: + cachingOptions: {} + componentRef: + name: comp-analyze-inputs + dependentTasks: + - process-inputs + inputs: + artifacts: + input_text: + taskOutputArtifact: + outputArtifactKey: output_text + producerTask: process-inputs + taskInfo: + name: analyze-inputs + process-inputs: + cachingOptions: {} + componentRef: + name: comp-process-inputs + inputs: + parameters: + a_runtime_bool: + runtimeValue: + constant: true + a_runtime_number: + runtimeValue: + constant: 10.0 + a_runtime_string: + runtimeValue: + constant: foo + active: + componentInputParameter: active_in + name: + componentInputParameter: name_in + number: + componentInputParameter: number_in + threshold: + componentInputParameter: threshold_in + taskInfo: + name: process-inputs + inputDefinitions: + parameters: + active_in: + defaultValue: true + isOptional: true + parameterType: BOOLEAN + name_in: + defaultValue: default_name + isOptional: true + parameterType: STRING + number_in: + defaultValue: 42.0 + isOptional: true + parameterType: NUMBER_INTEGER + threshold_in: + defaultValue: 0.5 + isOptional: true + parameterType: NUMBER_DOUBLE +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.6 diff --git a/backend/src/v2/driver/test_data/component_with_optional_inputs.py b/backend/src/v2/driver/test_data/component_with_optional_inputs.py new file mode 100644 index 00000000000..de452e732f0 --- /dev/null +++ b/backend/src/v2/driver/test_data/component_with_optional_inputs.py @@ -0,0 +1,83 @@ +# Copyright 2023 The Kubeflow Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
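+
+# Test-data pipeline for the driver tests: component_op asserts, for every
+# input, the value the driver is expected to resolve (explicit constant,
+# pipeline-level default, component-level default, or None). Running this
+# module directly compiles it to component_with_optional_inputs.yaml.
+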
+from typing import Optional, Dict, List
+
+from kfp import compiler
+from kfp import dsl
+from kfp.dsl import component
+
+
+@component
+def component_op(
+    input_str1: Optional[str] = 'string default value',
+    input_str2: Optional[str] = None,
+    input_str3: Optional[str] = None,
+    input_str4_from_pipeline: Optional[str] = "Some component default",
+    input_str5_from_pipeline: Optional[str] = "Some component default",
+    input_str6_from_pipeline: Optional[str] = None,
+    input_bool1: Optional[bool] = True,
+    input_bool2: Optional[bool] = None,
+    input_dict: Optional[Dict[str, int]] = {"a": 1},
+    input_list: Optional[List[str]] = ["123"],
+    input_int: Optional[int] = 100,
+):
+    # When no values are provided to the pipeline's input
+    assert input_str1 == 'Hello'
+    assert input_str2 == 'World'
+    assert input_str3 is None
+    assert input_str4_from_pipeline == "Some component default"
+    assert input_str5_from_pipeline == "Some pipeline default"
+    assert input_str6_from_pipeline is None
+    assert input_bool1 is True
+    assert input_bool2 is None
+    assert input_dict == {"a": 1}
+    assert input_list == ["123"]
+    assert input_int == 100
+
+    print(f'input_str1: {input_str1}, type: {type(input_str1)}')
+    print(f'input_str2: {input_str2}, type: {type(input_str2)}')
+    print(f'input_str3: {input_str3}, type: {type(input_str3)}')
+    print(f'input_str4_from_pipeline: {input_str4_from_pipeline}, type: {type(input_str4_from_pipeline)}')
+    print(f'input_str5_from_pipeline: {input_str5_from_pipeline}, type: {type(input_str5_from_pipeline)}')
+    print(f'input_str6_from_pipeline: {input_str6_from_pipeline}, type: {type(input_str6_from_pipeline)}')
+    print(f'input_bool1: {input_bool1}, type: {type(input_bool1)}')
+    print(f'input_bool2: {input_bool2}, type: {type(input_bool2)}')
+    print(f'input_dict: {input_dict}, type: {type(input_dict)}')
+    print(f'input_list: {input_list}, type: {type(input_list)}')
+    print(f'input_int: {input_int}, type: {type(input_int)}')
+
+# When the pipeline defines None as the default value for an input,
+# the component's default value is used.
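+# For example, when the pipeline below runs with no runtime inputs:
+# input_str4 defaults to None, so input_str4_from_pipeline resolves to the
+# component default "Some component default", while input_str5 carries the
+# pipeline default "Some pipeline default", which overrides the component's
+# own default (see the asserts in component_op above).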
+ +# Run this pipeline without setting any runtime values +# Note that if running from the UI, the form requires +# inputs, and empty field != null / empty value +@dsl.pipeline(name='v2-component-optional-input') +def pipeline( + input_str4: Optional[str] = None, + input_str5: Optional[str] = "Some pipeline default", + input_str6: Optional[str] = None +): + component_op( + input_str1='Hello', + input_str2='World', + input_str4_from_pipeline=input_str4, + input_str5_from_pipeline=input_str5, + input_str6_from_pipeline=input_str6, + ) + + +if __name__ == '__main__': + compiler.Compiler().compile( + pipeline_func=pipeline, package_path=__file__.replace('.py', '.yaml')) diff --git a/backend/src/v2/driver/test_data/component_with_optional_inputs.yaml b/backend/src/v2/driver/test_data/component_with_optional_inputs.yaml new file mode 100644 index 00000000000..3f1cf4ab209 --- /dev/null +++ b/backend/src/v2/driver/test_data/component_with_optional_inputs.yaml @@ -0,0 +1,147 @@ +# PIPELINE DEFINITION +# Name: v2-component-optional-input +# Inputs: +# input_str4: str +# input_str5: str [Default: 'Some pipeline default'] +# input_str6: str +components: + comp-component-op: + executorLabel: exec-component-op + inputDefinitions: + parameters: + input_bool1: + defaultValue: true + isOptional: true + parameterType: BOOLEAN + input_bool2: + isOptional: true + parameterType: BOOLEAN + input_dict: + defaultValue: + a: 1.0 + isOptional: true + parameterType: STRUCT + input_int: + defaultValue: 100.0 + isOptional: true + parameterType: NUMBER_INTEGER + input_list: + defaultValue: + - '123' + isOptional: true + parameterType: LIST + input_str1: + defaultValue: string default value + isOptional: true + parameterType: STRING + input_str2: + isOptional: true + parameterType: STRING + input_str3: + isOptional: true + parameterType: STRING + input_str4_from_pipeline: + defaultValue: Some component default + isOptional: true + parameterType: STRING + input_str5_from_pipeline: + defaultValue: Some component default + isOptional: true + parameterType: STRING + input_str6_from_pipeline: + isOptional: true + parameterType: STRING +deploymentSpec: + executors: + exec-component-op: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - component_op + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef component_op(\n input_str1: Optional[str] = 'string default\ + \ value',\n input_str2: Optional[str] = None,\n input_str3: Optional[str]\ + \ = None,\n input_str4_from_pipeline: Optional[str] = \"Some component\ + \ default\",\n input_str5_from_pipeline: Optional[str] = \"Some component\ + \ default\",\n input_str6_from_pipeline: Optional[str] = None,\n input_bool1:\ + \ Optional[bool] = True,\n input_bool2: Optional[bool] = None,\n input_dict:\ + \ Optional[Dict[str, int]] = {\"a\": 1},\n input_list: Optional[List[str]]\ + \ = [\"123\"],\n input_int: Optional[int] = 100,\n):\n # When no values\ + \ are provided to the pipeline's input\n assert input_str1 == 'Hello'\n\ + \ assert input_str2 == 'World'\n assert input_str3 is None\n assert\ + \ input_str4_from_pipeline == \"Some component default\"\n assert input_str5_from_pipeline\ + \ == \"Some pipeline default\"\n assert input_str6_from_pipeline is None\n\ + \ assert input_bool1 is True\n assert input_bool2 is None\n assert\ + \ input_dict == {\"a\": 1}\n assert input_list == [\"123\"]\n assert\ + \ input_int == 100\n\n print(f'input_str1: {input_str1}, type: {type(input_str1)}')\n\ + \ print(f'input_str2: {input_str2}, type: {type(input_str2)}')\n print(f'input_str3:\ + \ {input_str3}, type: {type(input_str3)}')\n print(f'input_str4_from_pipeline:\ + \ {input_str4_from_pipeline}, type: {type(input_str4_from_pipeline)}')\n\ + \ print(f'input_str5_from_pipeline: {input_str5_from_pipeline}, type:\ + \ {type(input_str5_from_pipeline)}')\n print(f'input_str6_from_pipeline:\ + \ {input_str6_from_pipeline}, type: {type(input_str6_from_pipeline)}')\n\ + \ print(f'input_bool1: {input_bool1}, type: {type(input_bool1)}')\n \ + \ print(f'input_bool2: {input_bool2}, type: {type(input_bool2)}')\n \ + \ print(f'input_dict: {input_dict}, type: {type(input_dict)}')\n print(f'input_list:\ + \ {input_list}, type: {type(input_list)}')\n print(f'input_int: {input_int},\ + \ type: {type(input_int)}')\n\n" + image: python:3.11 +pipelineInfo: + name: v2-component-optional-input +root: + dag: + tasks: + component-op: + cachingOptions: {} + componentRef: + name: comp-component-op + inputs: + parameters: + input_str1: + runtimeValue: + constant: Hello + input_str2: + runtimeValue: + constant: World + input_str4_from_pipeline: + componentInputParameter: input_str4 + input_str5_from_pipeline: + componentInputParameter: input_str5 + input_str6_from_pipeline: + componentInputParameter: input_str6 + taskInfo: + name: component-op + inputDefinitions: + parameters: + input_str4: + isOptional: true + parameterType: STRING + input_str5: + defaultValue: Some pipeline default + isOptional: true + parameterType: STRING + input_str6: + isOptional: true + parameterType: STRING +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.6 diff --git a/backend/src/v2/driver/test_data/k8s_parameters.py 
b/backend/src/v2/driver/test_data/k8s_parameters.py new file mode 100644 index 00000000000..8acc9f4154e --- /dev/null +++ b/backend/src/v2/driver/test_data/k8s_parameters.py @@ -0,0 +1,513 @@ +from typing import Optional, List + +from kfp import dsl +from kfp import kubernetes +from kfp.dsl import Output, OutputPath + + +@dsl.component(packages_to_install=['kubernetes']) +def assert_values(): + import os + from kubernetes import client, config + + cfg_key_1 = os.getenv("CFG_KEY_1", "didn't work") + cfg_key_2 = os.getenv("CFG_KEY_2", "didn't work") + cfg_key_3 = os.getenv("CFG_KEY_3", "didn't work") + cfg_key_4 = os.getenv("CFG_KEY_4", "didn't work") + + assert cfg_key_1 == "value1" + assert cfg_key_2 == "value2" + assert cfg_key_3 == "value3" + assert cfg_key_4 == "value4" + + with open('/tmp/config_map/cfgKey3', 'r') as f: + assert f.read() == "value3" + + secret_key_1 = os.getenv("SECRET_KEY_1", "didn't work") + secret_key_2 = os.getenv("SECRET_KEY_2", "didn't work") + secret_key_3 = os.getenv("SECRET_KEY_3", "didn't work") + secret_key_4 = os.getenv("SECRET_KEY_4", "didn't work") + + assert secret_key_1 == "value1" + assert secret_key_2 == "value2" + assert secret_key_3 == "value3" + assert secret_key_4 == "value4" + + with open('/tmp/secret/secretKey3', 'r') as f: + assert f.read() == "value3" + + # Get pod YAML + config.load_incluster_config() + v1 = client.CoreV1Api() + pod_name = os.getenv('HOSTNAME') + namespace = open('/var/run/secrets/kubernetes.io/serviceaccount/namespace').read() + pod = v1.read_namespaced_pod(pod_name, namespace) + + # Get pod's pull secrets + print("\nPod Pull Secrets:") + pull_secrets = [] + if pod.spec.image_pull_secrets: + for secret in pod.spec.image_pull_secrets: + print(f"Secret name: {secret.name}") + pull_secrets.append(secret.name) + + assert len(pull_secrets) == 6 + print(pull_secrets) + assert pull_secrets == ['pull-secret-1', 'pull-secret-2', 'pull-secret-1', 'pull-secret-2', 'pull-secret-3', 'pull-secret-4'] + + # Get pod's node selector + print("\nPod Node Selector:") + node_selector = pod.spec.node_selector + print(node_selector) + if node_selector: + for key, value in node_selector.items(): + print(f"Node selector {key}: {value}") + + assert node_selector == {"kubernetes.io/arch": "amd64",} + + # Get pod's tolerations + print("\nPod Tolerations:") + tolerations = pod.spec.tolerations + print(tolerations) + + # Get pod's node affinity + print("\nPod Node Affinity:") + node_affinity = pod.spec.affinity.node_affinity if pod.spec.affinity else None + print(node_affinity) + + # Helper function to check node affinity match expression + def has_match_expression(expressions, key, operator, values): + for expr in expressions: + if (expr.key == key and + expr.operator == operator and + expr.values == values): + return True + return False + + # Check node affinity rules + assert node_affinity is not None + required_terms = node_affinity.required_during_scheduling_ignored_during_execution.node_selector_terms + assert len(required_terms) == 1 + match_expressions = required_terms[0].match_expressions + assert len(match_expressions) == 1 + assert has_match_expression(match_expressions, "kubernetes.io/os", "In", ["linux"]) + + # Helper function to check if a toleration exists + def has_toleration(key, effect, operator, value=None, toleration_seconds=None): + for t in tolerations: + if (t.key == key and + t.effect == effect and + t.operator == operator and + t.value == value and + t.toleration_seconds == toleration_seconds): + return True + return False + + # 
Check each toleration individually + assert has_toleration('some_foo_key1', 'NoSchedule', 'Equal', 'value1') + assert has_toleration('some_foo_key2', 'NoExecute', 'Exists') + assert has_toleration('some_foo_key3', 'NoSchedule', 'Equal', 'value1') + assert has_toleration('some_foo_key4', 'NoSchedule', 'Equal', 'value2') + assert has_toleration('some_foo_key5', 'NoExecute', 'Exists') + assert has_toleration('some_foo_key6', 'NoSchedule', 'Equal', 'value3') + + # Get pod's PVCs and Empty Dir + print("\nPod Volumes and PVCs:") + volumes = pod.spec.volumes + pvcs = [] + empty_dir = [] + if volumes: + for volume in volumes: + if volume.persistent_volume_claim: + print(f"Volume name: {volume.name}") + print(f"PVC name: {volume.persistent_volume_claim.claim_name}") + pvcs.append(volume.persistent_volume_claim) + assert len(pvcs) == 1 + assert pvcs[0].claim_name.endswith('pvc-1') + +@dsl.component(packages_to_install=['kubernetes']) +def assert_values_two(): + from kubernetes import client, config + import os + + # Get pod YAML + config.load_incluster_config() + v1 = client.CoreV1Api() + pod_name = os.getenv('HOSTNAME') + namespace = open('/var/run/secrets/kubernetes.io/serviceaccount/namespace').read() + pod = v1.read_namespaced_pod(pod_name, namespace) + + print("\nPod Node Selector:") + node_selector = pod.spec.node_selector + print(node_selector) + assert node_selector == {"kubernetes.io/os": "linux"} + + # Get pod's node affinity + print("\nPod Node Affinity:") + node_affinity = pod.spec.affinity.node_affinity if pod.spec.affinity else None + print(node_affinity) + + # Helper function to check node affinity match expression + def has_match_expression(expressions, key, operator, values): + for expr in expressions: + if (expr.key == key and + expr.operator == operator and + expr.values == values): + return True + return False + + # Check node affinity rules + assert node_affinity is not None + required_terms = node_affinity.required_during_scheduling_ignored_during_execution.node_selector_terms + assert len(required_terms) == 1 + match_expressions = required_terms[0].match_expressions + assert len(match_expressions) == 1 + assert has_match_expression(match_expressions, "kubernetes.io/os", "In", ["linux"]) + +@dsl.component(packages_to_install=['kubernetes']) +def assert_values_three(): + from kubernetes import client, config + import os + + # Get pod YAML + config.load_incluster_config() + v1 = client.CoreV1Api() + pod_name = os.getenv('HOSTNAME') + namespace = open('/var/run/secrets/kubernetes.io/serviceaccount/namespace').read() + pod = v1.read_namespaced_pod(pod_name, namespace) + + tolerations = pod.spec.tolerations + print(tolerations) + + # Helper function to check if a toleration exists + def has_toleration(key, effect, operator, value=None, toleration_seconds=None): + for t in tolerations: + if (t.key == key and + t.effect == effect and + t.operator == operator and + t.value == value and + t.toleration_seconds == toleration_seconds): + return True + return False + + # Check toleration + assert has_toleration('some_foo_key4', 'NoSchedule', 'Equal', 'value2') + assert has_toleration('some_foo_key5', 'NoExecute', 'Exists') + +@dsl.component() +def cfg_name_generator(some_output: OutputPath(str)): + configmap_name = "cfg-3" + with open(some_output, 'w') as f: + f.write(configmap_name) + +@dsl.component() +def secret_name_generator(some_output: OutputPath(str)): + secret_name = "secret-3" + with open(some_output, 'w') as f: + f.write(secret_name) + +@dsl.component() +def 
get_access_mode(access_mode: OutputPath(List[str])): + import json + with open(access_mode, 'w') as f: + f.write(json.dumps(["ReadWriteOnce"])) + +@dsl.component() +def get_node_affinity(node_affinity: OutputPath(dict)): + import json + with open(node_affinity, 'w') as f: + f.write(json.dumps( + { + "requiredDuringSchedulingIgnoredDuringExecution": { + "nodeSelectorTerms": [ + { + "matchExpressions": [ + { + "key": "kubernetes.io/os", + "operator": "In", + "values": ["linux"] + } + ] + } + ] + } + } + )) + +@dsl.component() +def generate_requests_resources(cpu_request_out: OutputPath(str), memory_request_out: OutputPath(str)): + with open(cpu_request_out, 'w') as f: + f.write('100m') + with open(memory_request_out, 'w') as f: + f.write('500Mi') + +@dsl.component() +def get_node_affinity(node_affinity: OutputPath(dict)): + import json + with open(node_affinity, 'w') as f: + f.write(json.dumps( + { + "requiredDuringSchedulingIgnoredDuringExecution": { + "nodeSelectorTerms": [ + { + "matchExpressions": [ + { + "key": "kubernetes.io/os", + "operator": "In", + "values": ["linux"] + } + ] + } + ] + } + } + )) + +# TODO (HumairAK): Empty Dir and Field Path TaskOutputParameters +# not supported yet +# @dsl.component() +# def get_empty_dir_volume_name(volume_name: OutputPath(str)): +# with open(volume_name, 'w') as f: +# f.write("bar-dir") +# +# @dsl.component() +# def get_field_path_env_var(env_var: OutputPath(str)): +# with open(env_var, 'w') as f: +# f.write("SERVICE_ACCOUNT_NAME") + +node_selector_default = {"kubernetes.io/os": "linux"} + +toleration_list_default = [ + { + "key": "some_foo_key4", + "operator": "Equal", + "value": "value2", + "effect": "NoSchedule" + }, + { + "key": "some_foo_key5", + "operator": "Exists", + "effect": "NoExecute" + } +] + +toleration_dict_default = { + "key": "some_foo_key6", + "operator": "Equal", + "value": "value3", + "effect": "NoSchedule" +} + +@dsl.pipeline +def secondary_pipeline(train_tolerations: list): + task = assert_values_three() + kubernetes.add_toleration_json(task, train_tolerations) + +default_node_affinity = { + "requiredDuringSchedulingIgnoredDuringExecution": { + "nodeSelectorTerms": [ + { + "matchExpressions": [ + { + "key": "kubernetes.io/os", + "operator": "In", + "values": ["linux"] + } + ] + } + ] + } + } + +@dsl.pipeline +def primary_pipeline( + configmap_parm: str = 'cfg-2', + secret_param: str = 'secret-2', + pull_secret_1: str = 'pull-secret-1', + pull_secret_2: str = 'pull-secret-2', + pull_secret_3: str = 'pull-secret-3', + node_selector_input: dict = {"kubernetes.io/os": "linux"}, + tolerations_list_input: list = toleration_list_default, + tolerations_dict_input: dict = toleration_dict_default, + pvc_name_suffix_input: str = '-pvc-1', + empty_dir_mnt_path: str = '/empty_dir/path', + field_path: str = 'spec.serviceAccountName', + default_node_affinity_input: dict = default_node_affinity, + cpu_limit: str = '200m', + memory_limit: str = '500Mi', + container_image: str = 'python:3.9', +): + + cfg_name_generator_task = cfg_name_generator() + secret_name_generator_task = secret_name_generator() + + task = assert_values().set_caching_options(enable_caching=False) + + # configmap verification + kubernetes.use_config_map_as_env( + task, + config_map_name='cfg-1', + config_map_key_to_env={ + 'cfgKey1': 'CFG_KEY_1', + 'cfgKey2': 'CFG_KEY_2' + }) + kubernetes.use_config_map_as_env( + task, + config_map_name=configmap_parm, + config_map_key_to_env={ + 'cfgKey3': 'CFG_KEY_3', + }) + kubernetes.use_config_map_as_env( + task, + 
config_map_name=cfg_name_generator_task.output, + config_map_key_to_env={ + 'cfgKey4': 'CFG_KEY_4', + }) + + kubernetes.use_config_map_as_volume( + task, + config_map_name=configmap_parm, + mount_path='/tmp/config_map') + + # secret verification + kubernetes.use_secret_as_env( + task, + secret_name='secret-1', + secret_key_to_env={ + 'secretKey1': 'SECRET_KEY_1', + 'secretKey2': 'SECRET_KEY_2' + }) + kubernetes.use_secret_as_env( + task, + secret_name=secret_param, + secret_key_to_env={ + 'secretKey3': 'SECRET_KEY_3', + }) + kubernetes.use_secret_as_env( + task, + secret_name=secret_name_generator_task.output, + secret_key_to_env={ + 'secretKey4': 'SECRET_KEY_4', + }) + + kubernetes.use_secret_as_volume( + task, + secret_name=secret_param, + mount_path='/tmp/secret') + + # pull secrets + kubernetes.set_image_pull_secrets( + task, + secret_names=[pull_secret_1, pull_secret_2]) + kubernetes.set_image_pull_secrets( + task, + secret_names=["pull-secret-1", "pull-secret-2"]) + kubernetes.set_image_pull_secrets( + task, + secret_names=([pull_secret_3, "pull-secret-4"]) + ) + + # node selector + kubernetes.add_node_selector_json( + task, + node_selector_json={ + "kubernetes.io/arch": "amd64", + }, + ) + + # You can't append node selectors, to verify ComponentInput option + # in another task + task_2 = assert_values_two().set_caching_options(enable_caching=False) + kubernetes.add_node_selector_json( + task_2, + node_selector_json=node_selector_input, + ) + + # tolerations + kubernetes.add_toleration_json(task, [ + { + "key": "some_foo_key1", + "operator": "Equal", + "value": "value1", + "effect": "NoSchedule" + }, + { + "key": "some_foo_key2", + "operator": "Exists", + "effect": "NoExecute" + } + ]) + kubernetes.add_toleration_json(task, { + "key": "some_foo_key3", + "operator": "Equal", + "value": "value1", + "effect": "NoSchedule" + }) + kubernetes.add_toleration_json(task, tolerations_dict_input) + kubernetes.add_toleration_json(task, tolerations_list_input) + + # cpu/memory/container image + generate_requests_resources_task = generate_requests_resources() + task.set_cpu_request(generate_requests_resources_task.outputs["cpu_request_out"]) + task.set_memory_request(generate_requests_resources_task.outputs["memory_request_out"]) + + task.set_cpu_limit(cpu_limit) + task.set_memory_limit(memory_limit) + task.set_container_image(container_image) + + # Test nested toleration + secondary_pipeline(train_tolerations=tolerations_list_input) + + # PVCs + access_mode_task = get_access_mode() + output_pvc_task = kubernetes.CreatePVC( + pvc_name_suffix=pvc_name_suffix_input, # Component Input Parameter + access_modes=access_mode_task.output, # Task Output Parameter + size="5Mi", # Runtime Constant + ) + kubernetes.mount_pvc( + task, + pvc_name=output_pvc_task.output, # Task Output Parameter + mount_path='/pvc/path') + output_pvc_delete_task = kubernetes.DeletePVC(pvc_name=output_pvc_task.output) + output_pvc_delete_task.after(task) + + # node affinity + get_node_affinity_task = get_node_affinity() + kubernetes.add_node_affinity_json( + task, + node_affinity_json=get_node_affinity_task.output # Task Output Parameter + ) + + kubernetes.add_node_affinity_json( + task_2, + default_node_affinity_input # Component Input Parameter + ) + + + # TODO(HumairAK) Empty dir doesn't support parameterization + # empty dir + # empty_dir_volume_name_task = get_empty_dir_volume_name() + # kubernetes.empty_dir_mount( + # task, + # empty_dir_volume_name_task.output, + # empty_dir_mnt_path, + # None, + # '1Mi', + # ) + + # 
TODO(HumairAK) Field Path doesn't support parameterization + # field Path + # get_field_path_env_var_task = get_field_path_env_var() + # kubernetes.use_field_path_as_env( + # task, + # field_path=field_path, # Component Input Parameter + # env_name=get_field_path_env_var_task.output, # Task Output Parameter + # ) + + +if __name__ == '__main__': + from kfp import compiler + compiler.Compiler().compile( + pipeline_func=primary_pipeline, + package_path=__file__.replace('.py', '.yaml')) \ No newline at end of file diff --git a/backend/src/v2/driver/test_data/k8s_parameters.yaml b/backend/src/v2/driver/test_data/k8s_parameters.yaml new file mode 100644 index 00000000000..91cd01577ae --- /dev/null +++ b/backend/src/v2/driver/test_data/k8s_parameters.yaml @@ -0,0 +1,829 @@ +# PIPELINE DEFINITION +# Name: primary-pipeline +# Inputs: +# configmap_parm: str [Default: 'cfg-2'] +# container_image: str [Default: 'python:3.9'] +# cpu_limit: str [Default: '200m'] +# default_node_affinity_input: dict [Default: {'requiredDuringSchedulingIgnoredDuringExecution': {'nodeSelectorTerms': [{'matchExpressions': [{'operator': 'In', 'key': 'kubernetes.io/os', 'values': ['linux']}]}]}}] +# empty_dir_mnt_path: str [Default: '/empty_dir/path'] +# field_path: str [Default: 'spec.serviceAccountName'] +# memory_limit: str [Default: '500Mi'] +# node_selector_input: dict [Default: {'kubernetes.io/os': 'linux'}] +# pull_secret_1: str [Default: 'pull-secret-1'] +# pull_secret_2: str [Default: 'pull-secret-2'] +# pull_secret_3: str [Default: 'pull-secret-3'] +# pvc_name_suffix_input: str [Default: '-pvc-1'] +# secret_param: str [Default: 'secret-2'] +# tolerations_dict_input: dict [Default: {'value': 'value3', 'operator': 'Equal', 'key': 'some_foo_key6', 'effect': 'NoSchedule'}] +# tolerations_list_input: list [Default: [{'value': 'value2', 'operator': 'Equal', 'key': 'some_foo_key4', 'effect': 'NoSchedule'}, {'operator': 'Exists', 'key': 'some_foo_key5', 'effect': 'NoExecute'}]] +components: + comp-assert-values: + executorLabel: exec-assert-values + comp-assert-values-three: + executorLabel: exec-assert-values-three + comp-assert-values-two: + executorLabel: exec-assert-values-two + comp-cfg-name-generator: + executorLabel: exec-cfg-name-generator + outputDefinitions: + parameters: + some_output: + parameterType: STRING + comp-createpvc: + executorLabel: exec-createpvc + inputDefinitions: + parameters: + access_modes: + description: 'AccessModes to request for the provisioned PVC. May + + be one or more of ``''ReadWriteOnce''``, ``''ReadOnlyMany''``, ``''ReadWriteMany''``, + or + + ``''ReadWriteOncePod''``. Corresponds to `PersistentVolumeClaim.spec.accessModes + `_.' + parameterType: LIST + annotations: + description: Annotations for the PVC's metadata. Corresponds to `PersistentVolumeClaim.metadata.annotations + `_. + isOptional: true + parameterType: STRUCT + pvc_name: + description: 'Name of the PVC. Corresponds to `PersistentVolumeClaim.metadata.name + `_. + Only one of ``pvc_name`` and ``pvc_name_suffix`` can + + be provided.' + isOptional: true + parameterType: STRING + pvc_name_suffix: + description: 'Prefix to use for a dynamically generated name, which + + will take the form ``-``. Only one + + of ``pvc_name`` and ``pvc_name_suffix`` can be provided.' + isOptional: true + parameterType: STRING + size: + description: The size of storage requested by the PVC that will be provisioned. + For example, ``'5Gi'``. Corresponds to `PersistentVolumeClaim.spec.resources.requests.storage + `_. 
+ parameterType: STRING + storage_class_name: + defaultValue: '' + description: 'Name of StorageClass from which to provision the PV + + to back the PVC. ``None`` indicates to use the cluster''s default + + storage_class_name. Set to ``''''`` for a statically specified PVC.' + isOptional: true + parameterType: STRING + volume_name: + description: 'Pre-existing PersistentVolume that should back the + + provisioned PersistentVolumeClaim. Used for statically + + specified PV only. Corresponds to `PersistentVolumeClaim.spec.volumeName + `_.' + isOptional: true + parameterType: STRING + outputDefinitions: + parameters: + name: + parameterType: STRING + comp-deletepvc: + executorLabel: exec-deletepvc + inputDefinitions: + parameters: + pvc_name: + description: Name of the PVC to delete. Supports passing a runtime-generated + name, such as a name provided by ``kubernetes.CreatePvcOp().outputs['name']``. + parameterType: STRING + comp-generate-requests-resources: + executorLabel: exec-generate-requests-resources + outputDefinitions: + parameters: + cpu_request_out: + parameterType: STRING + memory_request_out: + parameterType: STRING + comp-get-access-mode: + executorLabel: exec-get-access-mode + outputDefinitions: + parameters: + access_mode: + parameterType: LIST + comp-get-node-affinity: + executorLabel: exec-get-node-affinity + outputDefinitions: + parameters: + node_affinity: + parameterType: STRUCT + comp-secondary-pipeline: + dag: + tasks: + assert-values-three: + cachingOptions: {} + componentRef: + name: comp-assert-values-three + taskInfo: + name: assert-values-three + inputDefinitions: + parameters: + train_tolerations: + parameterType: LIST + comp-secret-name-generator: + executorLabel: exec-secret-name-generator + outputDefinitions: + parameters: + some_output: + parameterType: STRING +deploymentSpec: + executors: + exec-assert-values: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - assert_values + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kubernetes'\ + \ && python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef assert_values():\n import os\n from kubernetes import client,\ + \ config\n\n cfg_key_1 = os.getenv(\"CFG_KEY_1\", \"didn't work\")\n\ + \ cfg_key_2 = os.getenv(\"CFG_KEY_2\", \"didn't work\")\n cfg_key_3\ + \ = os.getenv(\"CFG_KEY_3\", \"didn't work\")\n cfg_key_4 = os.getenv(\"\ + CFG_KEY_4\", \"didn't work\")\n\n assert cfg_key_1 == \"value1\"\n \ + \ assert cfg_key_2 == \"value2\"\n assert cfg_key_3 == \"value3\"\n\ + \ assert cfg_key_4 == \"value4\"\n\n with open('/tmp/config_map/cfgKey3',\ + \ 'r') as f:\n assert f.read() == \"value3\"\n\n secret_key_1\ + \ = os.getenv(\"SECRET_KEY_1\", \"didn't work\")\n secret_key_2 = os.getenv(\"\ + SECRET_KEY_2\", \"didn't work\")\n secret_key_3 = os.getenv(\"SECRET_KEY_3\"\ + , \"didn't work\")\n secret_key_4 = os.getenv(\"SECRET_KEY_4\", \"didn't\ + \ work\")\n\n assert secret_key_1 == \"value1\"\n assert secret_key_2\ + \ == \"value2\"\n assert secret_key_3 == \"value3\"\n assert secret_key_4\ + \ == \"value4\"\n\n with open('/tmp/secret/secretKey3', 'r') as f:\n\ + \ assert f.read() == \"value3\"\n\n # Get pod YAML\n config.load_incluster_config()\n\ + \ v1 = client.CoreV1Api()\n pod_name = os.getenv('HOSTNAME')\n \ + \ namespace = open('/var/run/secrets/kubernetes.io/serviceaccount/namespace').read()\n\ + \ pod = v1.read_namespaced_pod(pod_name, namespace)\n\n # Get pod's\ + \ pull secrets\n print(\"\\nPod Pull Secrets:\")\n pull_secrets =\ + \ []\n if pod.spec.image_pull_secrets:\n for secret in pod.spec.image_pull_secrets:\n\ + \ print(f\"Secret name: {secret.name}\")\n pull_secrets.append(secret.name)\n\ + \n assert len(pull_secrets) == 6\n print(pull_secrets)\n assert\ + \ pull_secrets == ['pull-secret-1', 'pull-secret-2', 'pull-secret-1', 'pull-secret-2',\ + \ 'pull-secret-3', 'pull-secret-4']\n\n # Get pod's node selector\n \ + \ print(\"\\nPod Node Selector:\")\n node_selector = pod.spec.node_selector\n\ + \ print(node_selector)\n if node_selector:\n for key, value\ + \ in node_selector.items():\n print(f\"Node selector {key}: {value}\"\ + )\n\n assert node_selector == {\"kubernetes.io/arch\": \"amd64\",}\n\n\ + \ # Get pod's tolerations\n print(\"\\nPod Tolerations:\")\n tolerations\ + \ = pod.spec.tolerations\n print(tolerations)\n\n # Get pod's node\ + \ affinity\n print(\"\\nPod Node Affinity:\")\n node_affinity = pod.spec.affinity.node_affinity\ + \ if pod.spec.affinity else None\n print(node_affinity)\n\n # Helper\ + \ function to check node affinity match expression\n def has_match_expression(expressions,\ + \ key, operator, values):\n for expr in expressions:\n \ + \ if (expr.key == key and\n expr.operator == operator\ + \ and\n expr.values == values):\n return\ + \ True\n return False\n\n # Check node affinity rules\n assert\ + \ node_affinity is not None\n required_terms = 
node_affinity.required_during_scheduling_ignored_during_execution.node_selector_terms\n\ + \ assert len(required_terms) == 1\n match_expressions = required_terms[0].match_expressions\n\ + \ assert len(match_expressions) == 1\n assert has_match_expression(match_expressions,\ + \ \"kubernetes.io/os\", \"In\", [\"linux\"])\n\n # Helper function to\ + \ check if a toleration exists\n def has_toleration(key, effect, operator,\ + \ value=None, toleration_seconds=None):\n for t in tolerations:\n\ + \ if (t.key == key and\n t.effect == effect and\n\ + \ t.operator == operator and\n t.value ==\ + \ value and\n t.toleration_seconds == toleration_seconds):\n\ + \ return True\n return False\n\n # Check each toleration\ + \ individually\n assert has_toleration('some_foo_key1', 'NoSchedule',\ + \ 'Equal', 'value1')\n assert has_toleration('some_foo_key2', 'NoExecute',\ + \ 'Exists')\n assert has_toleration('some_foo_key3', 'NoSchedule', 'Equal',\ + \ 'value1')\n assert has_toleration('some_foo_key4', 'NoSchedule', 'Equal',\ + \ 'value2')\n assert has_toleration('some_foo_key5', 'NoExecute', 'Exists')\n\ + \ assert has_toleration('some_foo_key6', 'NoSchedule', 'Equal', 'value3')\n\ + \n # Get pod's PVCs and Empty Dir\n print(\"\\nPod Volumes and PVCs:\"\ + )\n volumes = pod.spec.volumes\n pvcs = []\n empty_dir = []\n \ + \ if volumes:\n for volume in volumes:\n if volume.persistent_volume_claim:\n\ + \ print(f\"Volume name: {volume.name}\")\n \ + \ print(f\"PVC name: {volume.persistent_volume_claim.claim_name}\")\n \ + \ pvcs.append(volume.persistent_volume_claim)\n assert\ + \ len(pvcs) == 1\n assert pvcs[0].claim_name.endswith('pvc-1')\n\n" + image: '{{$.inputs.parameters[''pipelinechannel--container_image'']}}' + resources: + resourceCpuLimit: '{{$.inputs.parameters[''pipelinechannel--cpu_limit'']}}' + resourceCpuRequest: '{{$.inputs.parameters[''pipelinechannel--generate-requests-resources-cpu_request_out'']}}' + resourceMemoryLimit: '{{$.inputs.parameters[''pipelinechannel--memory_limit'']}}' + resourceMemoryRequest: '{{$.inputs.parameters[''pipelinechannel--generate-requests-resources-memory_request_out'']}}' + exec-assert-values-three: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - assert_values_three + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kubernetes'\ + \ && python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef assert_values_three():\n from kubernetes import client, config\n\ + \ import os\n\n # Get pod YAML\n config.load_incluster_config()\n\ + \ v1 = client.CoreV1Api()\n pod_name = os.getenv('HOSTNAME')\n \ + \ namespace = open('/var/run/secrets/kubernetes.io/serviceaccount/namespace').read()\n\ + \ pod = v1.read_namespaced_pod(pod_name, namespace)\n\n tolerations\ + \ = pod.spec.tolerations\n print(tolerations)\n\n # Helper function\ + \ to check if a toleration exists\n def has_toleration(key, effect, operator,\ + \ value=None, toleration_seconds=None):\n for t in tolerations:\n\ + \ if (t.key == key and\n t.effect == effect\ + \ and\n t.operator == operator and\n \ + \ t.value == value and\n t.toleration_seconds == toleration_seconds):\n\ + \ return True\n return False\n\n # Check toleration\n\ + \ assert has_toleration('some_foo_key4', 'NoSchedule', 'Equal', 'value2')\n\ + \ assert has_toleration('some_foo_key5', 'NoExecute', 'Exists')\n\n" + image: python:3.11 + exec-assert-values-two: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - assert_values_two + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kubernetes'\ + \ && python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef assert_values_two():\n from kubernetes import client, config\n\ + \ import os\n\n # Get pod YAML\n config.load_incluster_config()\n\ + \ v1 = client.CoreV1Api()\n pod_name = os.getenv('HOSTNAME')\n \ + \ namespace = open('/var/run/secrets/kubernetes.io/serviceaccount/namespace').read()\n\ + \ pod = v1.read_namespaced_pod(pod_name, namespace)\n\n print(\"\\\ + nPod Node Selector:\")\n node_selector = pod.spec.node_selector\n \ + \ print(node_selector)\n assert node_selector == {\"kubernetes.io/os\"\ + : \"linux\"}\n\n # Get pod's node affinity\n print(\"\\nPod Node Affinity:\"\ + )\n node_affinity = pod.spec.affinity.node_affinity if pod.spec.affinity\ + \ else None\n print(node_affinity)\n\n # Helper function to check\ + \ node affinity match expression\n def has_match_expression(expressions,\ + \ key, operator, values):\n for expr in expressions:\n \ + \ if (expr.key == key and\n expr.operator == operator\ + \ and\n expr.values == values):\n return\ + \ True\n return False\n\n # Check node affinity rules\n assert\ + \ node_affinity is not None\n required_terms = node_affinity.required_during_scheduling_ignored_during_execution.node_selector_terms\n\ + \ assert len(required_terms) == 1\n match_expressions = required_terms[0].match_expressions\n\ + \ assert len(match_expressions) == 1\n assert has_match_expression(match_expressions,\ + \ \"kubernetes.io/os\", \"In\", [\"linux\"])\n\n" + image: python:3.11 + exec-cfg-name-generator: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - cfg_name_generator + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef cfg_name_generator(some_output: OutputPath(str)):\n configmap_name\ + \ = \"cfg-3\"\n with open(some_output, 'w') as f:\n f.write(configmap_name)\n\ + \n" + image: python:3.11 + exec-createpvc: + container: + image: argostub/createpvc + exec-deletepvc: + container: + image: argostub/deletepvc + exec-generate-requests-resources: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - generate_requests_resources + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef generate_requests_resources(cpu_request_out: OutputPath(str),\ + \ memory_request_out: OutputPath(str)):\n with open(cpu_request_out,\ + \ 'w') as f:\n f.write('100m')\n with open(memory_request_out,\ + \ 'w') as f:\n f.write('500Mi')\n\n" + image: python:3.11 + exec-get-access-mode: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - get_access_mode + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef get_access_mode(access_mode: OutputPath(List[str])):\n import\ + \ json\n with open(access_mode, 'w') as f:\n f.write(json.dumps([\"\ + ReadWriteOnce\"]))\n\n" + image: python:3.11 + exec-get-node-affinity: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - get_node_affinity + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef get_node_affinity(node_affinity: OutputPath(dict)):\n import\ + \ json\n with open(node_affinity, 'w') as f:\n f.write(json.dumps(\n\ + \ {\n \"requiredDuringSchedulingIgnoredDuringExecution\"\ + : {\n \"nodeSelectorTerms\": [\n \ + \ {\n \"matchExpressions\": [\n \ + \ {\n \"key\": \"\ + kubernetes.io/os\",\n \"operator\": \"\ + In\",\n \"values\": [\"linux\"]\n \ + \ }\n ]\n \ + \ }\n ]\n }\n \ + \ }\n ))\n\n" + image: python:3.11 + exec-secret-name-generator: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - secret_name_generator + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef secret_name_generator(some_output: OutputPath(str)):\n secret_name\ + \ = \"secret-3\"\n with open(some_output, 'w') as f:\n f.write(secret_name)\n\ + \n" + image: python:3.11 +pipelineInfo: + name: primary-pipeline +root: + dag: + tasks: + assert-values: + cachingOptions: {} + componentRef: + name: comp-assert-values + dependentTasks: + - cfg-name-generator + - createpvc + - get-node-affinity + - secret-name-generator + inputs: + parameters: + base_image: + runtimeValue: + constant: '{{$.inputs.parameters[''pipelinechannel--container_image'']}}' + cpu_limit: + runtimeValue: + constant: '{{$.inputs.parameters[''pipelinechannel--cpu_limit'']}}' + cpu_request: + runtimeValue: + constant: '{{$.inputs.parameters[''pipelinechannel--generate-requests-resources-cpu_request_out'']}}' + memory_limit: + runtimeValue: + constant: '{{$.inputs.parameters[''pipelinechannel--memory_limit'']}}' + memory_request: + runtimeValue: + constant: '{{$.inputs.parameters[''pipelinechannel--generate-requests-resources-memory_request_out'']}}' + pipelinechannel--container_image: + componentInputParameter: container_image + pipelinechannel--cpu_limit: + componentInputParameter: cpu_limit + pipelinechannel--generate-requests-resources-cpu_request_out: + taskOutputParameter: + outputParameterKey: cpu_request_out + producerTask: generate-requests-resources + pipelinechannel--generate-requests-resources-memory_request_out: + taskOutputParameter: + outputParameterKey: memory_request_out + producerTask: generate-requests-resources + pipelinechannel--memory_limit: + componentInputParameter: memory_limit + taskInfo: + name: assert-values + assert-values-two: + cachingOptions: {} + componentRef: + name: comp-assert-values-two + taskInfo: + name: assert-values-two + cfg-name-generator: + cachingOptions: {} + componentRef: + name: comp-cfg-name-generator + taskInfo: + name: cfg-name-generator + createpvc: + cachingOptions: {} + componentRef: + name: comp-createpvc + dependentTasks: + - get-access-mode + inputs: + parameters: + access_modes: + taskOutputParameter: + outputParameterKey: access_mode + producerTask: get-access-mode + pvc_name_suffix: + componentInputParameter: pvc_name_suffix_input + size: + runtimeValue: + constant: 5Mi + taskInfo: + name: createpvc + deletepvc: + cachingOptions: {} + componentRef: + name: comp-deletepvc + dependentTasks: + - assert-values + - createpvc + inputs: + parameters: + pvc_name: + taskOutputParameter: + outputParameterKey: name + producerTask: createpvc + taskInfo: + name: deletepvc + generate-requests-resources: + cachingOptions: {} + componentRef: + name: comp-generate-requests-resources + taskInfo: + name: generate-requests-resources + get-access-mode: + cachingOptions: {} + componentRef: + name: comp-get-access-mode + taskInfo: + name: get-access-mode + get-node-affinity: + cachingOptions: {} + componentRef: + name: 
comp-get-node-affinity + taskInfo: + name: get-node-affinity + secondary-pipeline: + cachingOptions: {} + componentRef: + name: comp-secondary-pipeline + inputs: + parameters: + train_tolerations: + componentInputParameter: tolerations_list_input + taskInfo: + name: secondary-pipeline + secret-name-generator: + cachingOptions: {} + componentRef: + name: comp-secret-name-generator + taskInfo: + name: secret-name-generator + inputDefinitions: + parameters: + configmap_parm: + defaultValue: cfg-2 + isOptional: true + parameterType: STRING + container_image: + defaultValue: python:3.9 + isOptional: true + parameterType: STRING + cpu_limit: + defaultValue: 200m + isOptional: true + parameterType: STRING + default_node_affinity_input: + defaultValue: + requiredDuringSchedulingIgnoredDuringExecution: + nodeSelectorTerms: + - matchExpressions: + - key: kubernetes.io/os + operator: In + values: + - linux + isOptional: true + parameterType: STRUCT + empty_dir_mnt_path: + defaultValue: /empty_dir/path + isOptional: true + parameterType: STRING + field_path: + defaultValue: spec.serviceAccountName + isOptional: true + parameterType: STRING + memory_limit: + defaultValue: 500Mi + isOptional: true + parameterType: STRING + node_selector_input: + defaultValue: + kubernetes.io/os: linux + isOptional: true + parameterType: STRUCT + pull_secret_1: + defaultValue: pull-secret-1 + isOptional: true + parameterType: STRING + pull_secret_2: + defaultValue: pull-secret-2 + isOptional: true + parameterType: STRING + pull_secret_3: + defaultValue: pull-secret-3 + isOptional: true + parameterType: STRING + pvc_name_suffix_input: + defaultValue: -pvc-1 + isOptional: true + parameterType: STRING + secret_param: + defaultValue: secret-2 + isOptional: true + parameterType: STRING + tolerations_dict_input: + defaultValue: + effect: NoSchedule + key: some_foo_key6 + operator: Equal + value: value3 + isOptional: true + parameterType: STRUCT + tolerations_list_input: + defaultValue: + - effect: NoSchedule + key: some_foo_key4 + operator: Equal + value: value2 + - effect: NoExecute + key: some_foo_key5 + operator: Exists + isOptional: true + parameterType: LIST +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.6 +--- +platforms: + kubernetes: + deploymentSpec: + executors: + exec-assert-values: + configMapAsEnv: + - configMapName: cfg-1 + configMapNameParameter: + runtimeValue: + constant: cfg-1 + keyToEnv: + - configMapKey: cfgKey1 + envVar: CFG_KEY_1 + - configMapKey: cfgKey2 + envVar: CFG_KEY_2 + optional: false + - configMapNameParameter: + componentInputParameter: configmap_parm + keyToEnv: + - configMapKey: cfgKey3 + envVar: CFG_KEY_3 + optional: false + - configMapNameParameter: + taskOutputParameter: + outputParameterKey: some_output + producerTask: cfg-name-generator + keyToEnv: + - configMapKey: cfgKey4 + envVar: CFG_KEY_4 + optional: false + configMapAsVolume: + - configMapNameParameter: + componentInputParameter: configmap_parm + mountPath: /tmp/config_map + optional: false + imagePullSecret: + - secretNameParameter: + componentInputParameter: pull_secret_1 + - secretNameParameter: + componentInputParameter: pull_secret_2 + - secretName: pull-secret-1 + secretNameParameter: + runtimeValue: + constant: pull-secret-1 + - secretName: pull-secret-2 + secretNameParameter: + runtimeValue: + constant: pull-secret-2 + - secretNameParameter: + componentInputParameter: pull_secret_3 + - secretName: pull-secret-4 + secretNameParameter: + runtimeValue: + constant: pull-secret-4 + nodeAffinity: + - nodeAffinityJson: + 
taskOutputParameter: + outputParameterKey: node_affinity + producerTask: get-node-affinity + nodeSelector: + nodeSelectorJson: + runtimeValue: + constant: + kubernetes.io/arch: amd64 + pvcMount: + - mountPath: /pvc/path + pvcNameParameter: + taskOutputParameter: + outputParameterKey: name + producerTask: createpvc + taskOutputParameter: + outputParameterKey: name + producerTask: createpvc + secretAsEnv: + - keyToEnv: + - envVar: SECRET_KEY_1 + secretKey: secretKey1 + - envVar: SECRET_KEY_2 + secretKey: secretKey2 + optional: false + secretName: secret-1 + secretNameParameter: + runtimeValue: + constant: secret-1 + - keyToEnv: + - envVar: SECRET_KEY_3 + secretKey: secretKey3 + optional: false + secretNameParameter: + componentInputParameter: secret_param + - keyToEnv: + - envVar: SECRET_KEY_4 + secretKey: secretKey4 + optional: false + secretNameParameter: + taskOutputParameter: + outputParameterKey: some_output + producerTask: secret-name-generator + secretAsVolume: + - mountPath: /tmp/secret + optional: false + secretNameParameter: + componentInputParameter: secret_param + tolerations: + - effect: NoSchedule + key: some_foo_key1 + operator: Equal + value: value1 + - effect: NoExecute + key: some_foo_key2 + operator: Exists + - effect: NoSchedule + key: some_foo_key3 + operator: Equal + value: value1 + - tolerationJson: + componentInputParameter: tolerations_dict_input + - tolerationJson: + componentInputParameter: tolerations_list_input + exec-assert-values-three: + tolerations: + - tolerationJson: + componentInputParameter: train_tolerations + exec-assert-values-two: + nodeAffinity: + - nodeAffinityJson: + componentInputParameter: default_node_affinity_input + nodeSelector: + nodeSelectorJson: + componentInputParameter: node_selector_input diff --git a/backend/src/v2/driver/test_data/loop_collected_InputParameter_Iterator.py b/backend/src/v2/driver/test_data/loop_collected_InputParameter_Iterator.py new file mode 100755 index 00000000000..2854b071177 --- /dev/null +++ b/backend/src/v2/driver/test_data/loop_collected_InputParameter_Iterator.py @@ -0,0 +1,64 @@ +import functools +from typing import List + +from kfp import dsl +from kfp.dsl import ( + Output, + Artifact, + component, pipeline, ParallelFor, Collected +) + +base_image="quay.io/opendatahub/ds-pipelines-ci-executor-image:v1.0" +dsl.component = functools.partial(dsl.component, base_image=base_image) + + +@component() +def split_ids(model_ids: str) -> list: + return model_ids.split(',') + +@component() +def create_file(file: Output[Artifact], content: str): + print(f'Creating file with content: {content}') + with open(file.path, 'w') as f: + f.write(content) + +@component() +def read_values(values: List[str]) -> str: + collect = [] + for v in values: + collect.append(v) + print(collect) + assert sorted(collect) == sorted(['s1', 's2', 's3', 's4']) + return 'values read' + + +@component() +def read_single_file(file: Artifact, expected: str) -> str: + print(f'Reading file: {file.path}') + with open(file.path, 'r') as f: + data = f.read() + print(data) + assert expected == data + return data + +@pipeline() +def secondary_pipeline(model_ids: str = '',) -> List[str]: + ids_split_op = split_ids(model_ids=model_ids) + with ParallelFor(ids_split_op.output) as model_id: + create_file_op = create_file(content=model_id) + read_single_file_task = read_single_file(file=create_file_op.outputs['file'], expected=model_id) + read_values(values=Collected(read_single_file_task.output)) + return Collected(read_single_file_task.output) + + 
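+# primary_pipeline consumes the sub-pipeline's Collected output (the fanned-in
+# List[str] from the ParallelFor above) as a plain LIST parameter, covering the
+# nested DAG-output case in addition to the in-loop Collected usage above.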
+@pipeline() +def primary_pipeline(): + model_ids = 's1,s2,s3,s4' + dag = secondary_pipeline(model_ids=model_ids) + read_values(values=dag.output) + +if __name__ == '__main__': + from kfp import compiler + compiler.Compiler().compile( + pipeline_func=primary_pipeline, + package_path=__file__.replace('.py', '.yaml')) diff --git a/backend/src/v2/driver/test_data/loop_collected_InputParameter_Iterator.yaml b/backend/src/v2/driver/test_data/loop_collected_InputParameter_Iterator.yaml new file mode 100644 index 00000000000..6132aec7b15 --- /dev/null +++ b/backend/src/v2/driver/test_data/loop_collected_InputParameter_Iterator.yaml @@ -0,0 +1,354 @@ +# PIPELINE DEFINITION +# Name: primary-pipeline +components: + comp-create-file: + executorLabel: exec-create-file + inputDefinitions: + parameters: + content: + parameterType: STRING + outputDefinitions: + artifacts: + file: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + comp-for-loop-1: + dag: + outputs: + parameters: + pipelinechannel--read-single-file-Output: + valueFromParameter: + outputParameterKey: Output + producerSubtask: read-single-file + tasks: + create-file: + cachingOptions: {} + componentRef: + name: comp-create-file + inputs: + parameters: + content: + componentInputParameter: pipelinechannel--split-ids-Output-loop-item + taskInfo: + name: create-file + read-single-file: + cachingOptions: {} + componentRef: + name: comp-read-single-file + dependentTasks: + - create-file + inputs: + artifacts: + file: + taskOutputArtifact: + outputArtifactKey: file + producerTask: create-file + parameters: + expected: + componentInputParameter: pipelinechannel--split-ids-Output-loop-item + taskInfo: + name: read-single-file + inputDefinitions: + parameters: + pipelinechannel--split-ids-Output: + parameterType: LIST + pipelinechannel--split-ids-Output-loop-item: + parameterType: STRING + outputDefinitions: + parameters: + pipelinechannel--read-single-file-Output: + parameterType: LIST + comp-read-single-file: + executorLabel: exec-read-single-file + inputDefinitions: + artifacts: + file: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + parameters: + expected: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-read-values: + executorLabel: exec-read-values + inputDefinitions: + parameters: + values: + parameterType: LIST + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-read-values-2: + executorLabel: exec-read-values-2 + inputDefinitions: + parameters: + values: + parameterType: LIST + outputDefinitions: + parameters: + Output: + parameterType: STRING + comp-secondary-pipeline: + dag: + outputs: + parameters: + Output: + valueFromParameter: + outputParameterKey: pipelinechannel--read-single-file-Output + producerSubtask: for-loop-1 + tasks: + for-loop-1: + componentRef: + name: comp-for-loop-1 + dependentTasks: + - split-ids + inputs: + parameters: + pipelinechannel--split-ids-Output: + taskOutputParameter: + outputParameterKey: Output + producerTask: split-ids + parameterIterator: + itemInput: pipelinechannel--split-ids-Output-loop-item + items: + inputParameter: pipelinechannel--split-ids-Output + taskInfo: + name: for-loop-1 + read-values: + cachingOptions: {} + componentRef: + name: comp-read-values + dependentTasks: + - for-loop-1 + inputs: + parameters: + values: + taskOutputParameter: + outputParameterKey: pipelinechannel--read-single-file-Output + producerTask: for-loop-1 + taskInfo: + name: read-values + split-ids: + 
cachingOptions: {} + componentRef: + name: comp-split-ids + inputs: + parameters: + model_ids: + componentInputParameter: model_ids + taskInfo: + name: split-ids + inputDefinitions: + parameters: + model_ids: + defaultValue: '' + isOptional: true + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: LIST + comp-split-ids: + executorLabel: exec-split-ids + inputDefinitions: + parameters: + model_ids: + parameterType: STRING + outputDefinitions: + parameters: + Output: + parameterType: LIST +deploymentSpec: + executors: + exec-create-file: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - create_file + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef create_file(file: Output[Artifact], content: str):\n print(f'Creating\ + \ file with content: {content}')\n with open(file.path, 'w') as f:\n\ + \ f.write(content)\n\n" + image: python:3.11 + exec-read-single-file: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - read_single_file + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef read_single_file(file: Artifact, expected: str) -> str:\n \ + \ print(f'Reading file: {file.path}')\n with open(file.path, 'r') as\ + \ f:\n data = f.read()\n print(data)\n assert expected\ + \ == data\n return data\n\n" + image: python:3.11 + exec-read-values: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - read_values + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef read_values(values: List[str]) -> str:\n collect = []\n \ + \ for v in values:\n collect.append(v)\n print(collect)\n \ + \ # TODO(HumairAK): These should not be required to be sorted to maintain\ + \ backwards compatibility\n assert sorted(collect) == sorted(['s1', 's2',\ + \ 's3', 's4'])\n return 'values read'\n\n" + image: python:3.11 + exec-read-values-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - read_values + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef read_values(values: List[str]) -> str:\n collect = []\n \ + \ for v in values:\n collect.append(v)\n print(collect)\n \ + \ # TODO(HumairAK): These should not be required to be sorted to maintain\ + \ backwards compatibility\n assert sorted(collect) == sorted(['s1', 's2',\ + \ 's3', 's4'])\n return 'values read'\n\n" + image: python:3.11 + exec-split-ids: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - split_ids + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef split_ids(model_ids: str) -> list:\n return model_ids.split(',')\n\ + \n" + image: python:3.11 +pipelineInfo: + name: primary-pipeline +root: + dag: + tasks: + read-values: + cachingOptions: {} + componentRef: + name: comp-read-values-2 + dependentTasks: + - secondary-pipeline + inputs: + parameters: + values: + taskOutputParameter: + outputParameterKey: Output + producerTask: secondary-pipeline + taskInfo: + name: read-values + secondary-pipeline: + cachingOptions: {} + componentRef: + name: comp-secondary-pipeline + inputs: + parameters: + model_ids: + runtimeValue: + constant: s1,s2,s3,s4 + taskInfo: + name: secondary-pipeline +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.6 diff --git a/backend/src/v2/driver/test_data/loop_collected_raw_Iterator.py b/backend/src/v2/driver/test_data/loop_collected_raw_Iterator.py new file mode 100755 index 00000000000..d88cc5e29bb --- /dev/null +++ b/backend/src/v2/driver/test_data/loop_collected_raw_Iterator.py @@ -0,0 +1,85 @@ +import functools +from typing import List + +from kfp import dsl +from kfp.dsl import ( + Input, + Output, + Artifact, + Dataset, + component +) + +base_image="quay.io/opendatahub/ds-pipelines-ci-executor-image:v1.0" +dsl.component = functools.partial(dsl.component, base_image=base_image) + +@component +def create_dataset(output_dataset: Output[Dataset]): + with open(output_dataset.path, "w") as f: + f.write('cat') + output_dataset.metadata["item_count"] = 5 + output_dataset.metadata["description"] = "A simple dataset with integers" + +@component +def process_dataset( + model_id_in: str, + input_dataset: Input[Dataset], + output_artifact: Output[Artifact], +): + with open(input_dataset.path, "r") as f: + data = f.read() + with open(output_artifact.path, "w") as f: + data_out = f"{data}-{model_id_in}" + f.write(data_out) + print(data_out) + output_artifact.metadata["model_id"] = model_id_in + +@component +def analyze_artifact(analyze_artifact_input: Input[Artifact], analyze_output_artifact: Output[Artifact]): + with open(analyze_artifact_input.path, "r") as f: + data = f.read() + with open(analyze_output_artifact.path, "w") as f: + f.write(f'{{"values": {data}}}') + +@component +def analyze_artifact_list(artifact_list_input: List[Artifact]): + expected_values = ['cat-1', 'cat-2', 'cat-3'] + expected_metadata = ['1', '2', '3'] + actual_values = [] + actual_metadata = [] + for artifact in artifact_list_input: + with open(artifact.path, "r") as f: + data = f.read() + actual_values.append(data) + actual_metadata.append(artifact.metadata["model_id"]) + + print("actual_values: ", actual_values) + print("actual_metadata: ", actual_metadata) + print("expected_values: ", expected_values) + print("expected_metadata: ", expected_metadata) + assert sorted(actual_values) == sorted(expected_values) + assert sorted(actual_metadata) == sorted(expected_metadata) + +@dsl.pipeline +def 
secondary_pipeline() -> List[Artifact]: + create_dataset_task = create_dataset() + with dsl.ParallelFor(items=['1', '2', '3']) as model_id: + process_dataset_task = process_dataset(model_id_in=model_id, input_dataset=create_dataset_task.outputs['output_dataset']) + analyze_artifact(analyze_artifact_input=process_dataset_task.outputs["output_artifact"]) + + # Case one, pass collected result as TaskOutput to analyze artifact + analyze_artifact_list(artifact_list_input=dsl.Collected(process_dataset_task.outputs["output_artifact"])) + + # Case two, return the collected result for dag.outputs + return dsl.Collected(process_dataset_task.outputs["output_artifact"]) + +@dsl.pipeline +def primary_pipeline(): + secondary_pipeline_output = secondary_pipeline() + analyze_artifact_list(artifact_list_input=secondary_pipeline_output.output) + +if __name__ == '__main__': + from kfp import compiler + compiler.Compiler().compile( + pipeline_func=primary_pipeline, + package_path=__file__.replace('.py', '.yaml')) diff --git a/backend/src/v2/driver/test_data/loop_collected_raw_Iterator.yaml b/backend/src/v2/driver/test_data/loop_collected_raw_Iterator.yaml new file mode 100644 index 00000000000..7b375991b20 --- /dev/null +++ b/backend/src/v2/driver/test_data/loop_collected_raw_Iterator.yaml @@ -0,0 +1,367 @@ +# PIPELINE DEFINITION +# Name: primary-pipeline +components: + comp-analyze-artifact: + executorLabel: exec-analyze-artifact + inputDefinitions: + artifacts: + analyze_artifact_input: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + outputDefinitions: + artifacts: + analyze_output_artifact: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + comp-analyze-artifact-list: + executorLabel: exec-analyze-artifact-list + inputDefinitions: + artifacts: + artifact_list_input: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + isArtifactList: true + comp-analyze-artifact-list-2: + executorLabel: exec-analyze-artifact-list-2 + inputDefinitions: + artifacts: + artifact_list_input: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + isArtifactList: true + comp-create-dataset: + executorLabel: exec-create-dataset + outputDefinitions: + artifacts: + output_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + comp-for-loop-2: + dag: + outputs: + artifacts: + pipelinechannel--process-dataset-output_artifact: + artifactSelectors: + - outputArtifactKey: output_artifact + producerSubtask: process-dataset + tasks: + analyze-artifact: + cachingOptions: {} + componentRef: + name: comp-analyze-artifact + dependentTasks: + - process-dataset + inputs: + artifacts: + analyze_artifact_input: + taskOutputArtifact: + outputArtifactKey: output_artifact + producerTask: process-dataset + taskInfo: + name: analyze-artifact + process-dataset: + cachingOptions: {} + componentRef: + name: comp-process-dataset + inputs: + artifacts: + input_dataset: + componentInputArtifact: pipelinechannel--create-dataset-output_dataset + parameters: + model_id_in: + componentInputParameter: pipelinechannel--loop-item-param-1 + taskInfo: + name: process-dataset + inputDefinitions: + artifacts: + pipelinechannel--create-dataset-output_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + parameters: + pipelinechannel--loop-item-param-1: + parameterType: STRING + outputDefinitions: + artifacts: + pipelinechannel--process-dataset-output_artifact: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + 
isArtifactList: true + comp-process-dataset: + executorLabel: exec-process-dataset + inputDefinitions: + artifacts: + input_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + parameters: + model_id_in: + parameterType: STRING + outputDefinitions: + artifacts: + output_artifact: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + comp-secondary-pipeline: + dag: + outputs: + artifacts: + Output: + artifactSelectors: + - outputArtifactKey: pipelinechannel--process-dataset-output_artifact + producerSubtask: for-loop-2 + tasks: + analyze-artifact-list: + cachingOptions: {} + componentRef: + name: comp-analyze-artifact-list + dependentTasks: + - for-loop-2 + inputs: + artifacts: + artifact_list_input: + taskOutputArtifact: + outputArtifactKey: pipelinechannel--process-dataset-output_artifact + producerTask: for-loop-2 + taskInfo: + name: analyze-artifact-list + create-dataset: + cachingOptions: {} + componentRef: + name: comp-create-dataset + taskInfo: + name: create-dataset + for-loop-2: + componentRef: + name: comp-for-loop-2 + dependentTasks: + - create-dataset + inputs: + artifacts: + pipelinechannel--create-dataset-output_dataset: + taskOutputArtifact: + outputArtifactKey: output_dataset + producerTask: create-dataset + parameterIterator: + itemInput: pipelinechannel--loop-item-param-1 + items: + raw: '["1", "2", "3"]' + taskInfo: + name: for-loop-2 + outputDefinitions: + artifacts: + Output: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + isArtifactList: true +deploymentSpec: + executors: + exec-analyze-artifact: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - analyze_artifact + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef analyze_artifact(analyze_artifact_input: Input[Artifact], analyze_output_artifact:\ + \ Output[Artifact]):\n with open(analyze_artifact_input.path, \"r\")\ + \ as f:\n data = f.read()\n with open(analyze_output_artifact.path,\ + \ \"w\") as f:\n f.write(f'{{\"values\": {data}}}')\n\n" + image: python:3.11 + exec-analyze-artifact-list: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - analyze_artifact_list + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef analyze_artifact_list(artifact_list_input: List[Artifact]):\n\ + \ expected_values = ['cat-1', 'cat-2', 'cat-3']\n expected_metadata\ + \ = ['1', '2', '3']\n actual_values = []\n actual_metadata = []\n\ + \ for artifact in artifact_list_input:\n with open(artifact.path,\ + \ \"r\") as f:\n data = f.read()\n actual_values.append(data)\n\ + \ actual_metadata.append(artifact.metadata[\"model_id\"])\n\n\ + \ print(\"actual_values: \", actual_values)\n print(\"actual_metadata:\ + \ \", actual_metadata)\n print(\"expected_values: \", expected_values)\n\ + \ print(\"expected_metadata: \", expected_metadata)\n # TODO(HumairAK):\ + \ These should not be required to be sorted to maintain backwards compatibility\n\ + \ assert sorted(actual_values) == sorted(expected_values)\n assert\ + \ sorted(actual_metadata) == sorted(expected_metadata)\n\n" + image: python:3.11 + exec-analyze-artifact-list-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - analyze_artifact_list + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef analyze_artifact_list(artifact_list_input: List[Artifact]):\n\ + \ expected_values = ['cat-1', 'cat-2', 'cat-3']\n expected_metadata\ + \ = ['1', '2', '3']\n actual_values = []\n actual_metadata = []\n\ + \ for artifact in artifact_list_input:\n with open(artifact.path,\ + \ \"r\") as f:\n data = f.read()\n actual_values.append(data)\n\ + \ actual_metadata.append(artifact.metadata[\"model_id\"])\n\n\ + \ print(\"actual_values: \", actual_values)\n print(\"actual_metadata:\ + \ \", actual_metadata)\n print(\"expected_values: \", expected_values)\n\ + \ print(\"expected_metadata: \", expected_metadata)\n # TODO(HumairAK):\ + \ These should not be required to be sorted to maintain backwards compatibility\n\ + \ assert sorted(actual_values) == sorted(expected_values)\n assert\ + \ sorted(actual_metadata) == sorted(expected_metadata)\n\n" + image: python:3.11 + exec-create-dataset: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - create_dataset + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef create_dataset(output_dataset: Output[Dataset]):\n with open(output_dataset.path,\ + \ \"w\") as f:\n f.write('cat')\n output_dataset.metadata[\"item_count\"\ + ] = 5\n output_dataset.metadata[\"description\"] = \"A simple dataset\ + \ with integers\"\n\n" + image: python:3.11 + exec-process-dataset: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - process_dataset + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef process_dataset(\n model_id_in: str,\n input_dataset:\ + \ Input[Dataset],\n output_artifact: Output[Artifact],\n):\n with\ + \ open(input_dataset.path, \"r\") as f:\n data = f.read()\n with\ + \ open(output_artifact.path, \"w\") as f:\n data_out = f\"{data}-{model_id_in}\"\ + \n f.write(data_out)\n print(data_out)\n output_artifact.metadata[\"\ + model_id\"] = model_id_in\n\n" + image: python:3.11 +pipelineInfo: + name: primary-pipeline +root: + dag: + tasks: + analyze-artifact-list: + cachingOptions: {} + componentRef: + name: comp-analyze-artifact-list-2 + dependentTasks: + - secondary-pipeline + inputs: + artifacts: + artifact_list_input: + taskOutputArtifact: + outputArtifactKey: Output + producerTask: secondary-pipeline + taskInfo: + name: analyze-artifact-list + secondary-pipeline: + cachingOptions: {} + componentRef: + name: comp-secondary-pipeline + taskInfo: + name: secondary-pipeline +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.6 diff --git a/backend/src/v2/driver/test_data/nested_naming_conflicts.py b/backend/src/v2/driver/test_data/nested_naming_conflicts.py new file mode 100644 index 00000000000..87ce5d88b06 --- /dev/null +++ b/backend/src/v2/driver/test_data/nested_naming_conflicts.py @@ -0,0 +1,78 @@ +import functools + +from kfp import dsl +from kfp.dsl import ( + Input, + Output, + Artifact, + Dataset, + component, + pipeline +) + +base_image="quay.io/opendatahub/ds-pipelines-ci-executor-image:v1.0" +dsl.component = functools.partial(dsl.component, base_image=base_image) + +@component +def a(situation: str, output_dataset: Output[Dataset]): + with open(output_dataset.path, "w") as f: + f.write(situation) + +@component +def b(input_dataset: Input[Dataset], output_artifact_b: Output[Artifact]): + with open(input_dataset.path, "r") 
as f: + data = f.read() + + if data == "hurricane": + analysis = "very_bad" + elif data == "sunny": + analysis = "very_good" + else: + analysis = "not_bad" + + with open(output_artifact_b.path, "w") as f: + f.write(analysis) + +@component +def c(artifact: Input[Artifact], output_artifact_c: Output[Artifact]): + with open(artifact.path, "r") as f: + data = f.read() + assert data == "very_bad" + with open(output_artifact_c.path, "w") as f: + f.write(f'done_analyzing') + +@component +def verify(verify_input: Input[Artifact]): + with open(verify_input.path, "r") as f: + data = f.read() + assert data == "done_analyzing" + +@pipeline +def pipeline_c(input_dataset_a: Input[Dataset], input_dataset_b: Input[Dataset]) -> Artifact: + a_task = a(situation="hurricane") + b_task = b(input_dataset=a_task.outputs["output_dataset"]) + c_task = c(artifact=b_task.outputs["output_artifact_b"]) + return c_task.outputs["output_artifact_c"] + + +@pipeline +def pipeline_b(input_dataset: Input[Dataset]) -> Artifact: + a_task = a(situation="raining") + b_task = b(input_dataset=input_dataset) + pipeline_c_op = pipeline_c( + input_dataset_a=a_task.outputs["output_dataset"], + input_dataset_b=b_task.outputs["output_artifact_b"], + ) + return pipeline_c_op.output + +@pipeline +def pipeline_a(): + a_task = a(situation="sunny") + nested_pipeline_op = pipeline_b(input_dataset=a_task.outputs["output_dataset"]) + verify(verify_input=nested_pipeline_op.output) +if __name__ == '__main__': + from kfp import compiler + + compiler.Compiler().compile( + pipeline_func=pipeline_a, + package_path=__file__.replace('.py', '.yaml')) diff --git a/backend/src/v2/driver/test_data/nested_naming_conflicts.yaml b/backend/src/v2/driver/test_data/nested_naming_conflicts.yaml new file mode 100644 index 00000000000..658ee3217ce --- /dev/null +++ b/backend/src/v2/driver/test_data/nested_naming_conflicts.yaml @@ -0,0 +1,476 @@ +# PIPELINE DEFINITION +# Name: pipeline-a +components: + comp-a: + executorLabel: exec-a + inputDefinitions: + parameters: + situation: + parameterType: STRING + outputDefinitions: + artifacts: + output_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + comp-a-2: + executorLabel: exec-a-2 + inputDefinitions: + parameters: + situation: + parameterType: STRING + outputDefinitions: + artifacts: + output_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + comp-a-3: + executorLabel: exec-a-3 + inputDefinitions: + parameters: + situation: + parameterType: STRING + outputDefinitions: + artifacts: + output_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + comp-b: + executorLabel: exec-b + inputDefinitions: + artifacts: + input_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + outputDefinitions: + artifacts: + output_artifact_b: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + comp-b-2: + executorLabel: exec-b-2 + inputDefinitions: + artifacts: + input_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + outputDefinitions: + artifacts: + output_artifact_b: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + comp-c: + executorLabel: exec-c + inputDefinitions: + artifacts: + artifact: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + outputDefinitions: + artifacts: + output_artifact_c: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + comp-pipeline-b: + dag: + outputs: + artifacts: + Output: + 
artifactSelectors: + - outputArtifactKey: Output + producerSubtask: pipeline-c + tasks: + a: + cachingOptions: {} + componentRef: + name: comp-a-3 + inputs: + parameters: + situation: + runtimeValue: + constant: raining + taskInfo: + name: a + b: + cachingOptions: {} + componentRef: + name: comp-b + inputs: + artifacts: + input_dataset: + componentInputArtifact: input_dataset + taskInfo: + name: b + pipeline-c: + cachingOptions: {} + componentRef: + name: comp-pipeline-c + dependentTasks: + - a + - b + inputs: + artifacts: + input_dataset_a: + taskOutputArtifact: + outputArtifactKey: output_dataset + producerTask: a + input_dataset_b: + taskOutputArtifact: + outputArtifactKey: output_artifact_b + producerTask: b + taskInfo: + name: pipeline-c + inputDefinitions: + artifacts: + input_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + outputDefinitions: + artifacts: + Output: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + comp-pipeline-c: + dag: + outputs: + artifacts: + Output: + artifactSelectors: + - outputArtifactKey: output_artifact_c + producerSubtask: c + tasks: + a: + cachingOptions: {} + componentRef: + name: comp-a-2 + inputs: + parameters: + situation: + runtimeValue: + constant: hurricane + taskInfo: + name: a + b: + cachingOptions: {} + componentRef: + name: comp-b-2 + dependentTasks: + - a + inputs: + artifacts: + input_dataset: + taskOutputArtifact: + outputArtifactKey: output_dataset + producerTask: a + taskInfo: + name: b + c: + cachingOptions: {} + componentRef: + name: comp-c + dependentTasks: + - b + inputs: + artifacts: + artifact: + taskOutputArtifact: + outputArtifactKey: output_artifact_b + producerTask: b + taskInfo: + name: c + inputDefinitions: + artifacts: + input_dataset_a: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + input_dataset_b: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + outputDefinitions: + artifacts: + Output: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + comp-verify: + executorLabel: exec-verify + inputDefinitions: + artifacts: + verify_input: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 +deploymentSpec: + executors: + exec-a: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - a + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef a(situation: str, output_dataset: Output[Dataset]):\n with\ + \ open(output_dataset.path, \"w\") as f:\n f.write(situation)\n\n" + image: python:3.11 + exec-a-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - a + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef a(situation: str, output_dataset: Output[Dataset]):\n with\ + \ open(output_dataset.path, \"w\") as f:\n f.write(situation)\n\n" + image: python:3.11 + exec-a-3: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - a + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef a(situation: str, output_dataset: Output[Dataset]):\n with\ + \ open(output_dataset.path, \"w\") as f:\n f.write(situation)\n\n" + image: python:3.11 + exec-b: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - b + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef b(input_dataset: Input[Dataset], output_artifact_b: Output[Artifact]):\n\ + \ with open(input_dataset.path, \"r\") as f:\n data = f.read()\n\ + \n if data == \"hurricane\":\n analysis = \"very_bad\"\n elif\ + \ data == \"sunny\":\n analysis = \"very_good\"\n else:\n \ + \ analysis = \"not_bad\"\n\n with open(output_artifact_b.path, \"\ + w\") as f:\n f.write(analysis)\n\n" + image: python:3.11 + exec-b-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - b + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef b(input_dataset: Input[Dataset], output_artifact_b: Output[Artifact]):\n\ + \ with open(input_dataset.path, \"r\") as f:\n data = f.read()\n\ + \n if data == \"hurricane\":\n analysis = \"very_bad\"\n elif\ + \ data == \"sunny\":\n analysis = \"very_good\"\n else:\n \ + \ analysis = \"not_bad\"\n\n with open(output_artifact_b.path, \"\ + w\") as f:\n f.write(analysis)\n\n" + image: python:3.11 + exec-c: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - c + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef c(artifact: Input[Artifact], output_artifact_c: Output[Artifact]):\n\ + \ with open(artifact.path, \"r\") as f:\n data = f.read()\n \ + \ assert data == \"very_bad\"\n with open(output_artifact_c.path, \"\ + w\") as f:\n f.write(f'done_analyzing')\n\n" + image: python:3.11 + exec-verify: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - verify + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef verify(verify_input: Input[Artifact]):\n with open(verify_input.path,\ + \ \"r\") as f:\n data = f.read()\n assert data == \"done_analyzing\"\ + \n\n" + image: python:3.11 +pipelineInfo: + name: pipeline-a +root: + dag: + tasks: + a: + cachingOptions: {} + componentRef: + name: comp-a + inputs: + parameters: + situation: + runtimeValue: + constant: sunny + taskInfo: + name: a + pipeline-b: + cachingOptions: {} + componentRef: + name: comp-pipeline-b + dependentTasks: + - a + inputs: + artifacts: + input_dataset: + taskOutputArtifact: + outputArtifactKey: output_dataset + producerTask: a + taskInfo: + name: pipeline-b + verify: + cachingOptions: {} + componentRef: + name: comp-verify + dependentTasks: + - pipeline-b + inputs: + artifacts: + verify_input: + taskOutputArtifact: + outputArtifactKey: Output + producerTask: pipeline-b + taskInfo: + name: verify +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.6 diff --git a/backend/src/v2/driver/test_data/nested_pipeline_opt_input_child_level.py b/backend/src/v2/driver/test_data/nested_pipeline_opt_input_child_level.py new file mode 100644 index 00000000000..4a9cef29ff5 --- /dev/null +++ b/backend/src/v2/driver/test_data/nested_pipeline_opt_input_child_level.py @@ -0,0 +1,71 @@ +import functools + +from kfp import compiler +from kfp import dsl + +base_image="quay.io/opendatahub/ds-pipelines-ci-executor-image:v1.0" +dsl.component = functools.partial(dsl.component, base_image=base_image) + +@dsl.component() +def component_a_str(componentInputStr: str = None): + if componentInputStr != 'Input - pipeline': + raise ValueError(f"componentInputStr should be 'Input - pipeline' but is {componentInputStr}") + + +@dsl.component() +def component_b_str(componentInputStr: str = None): + if componentInputStr != 'Input 2 - nested pipeline': + raise ValueError(f"componentInputStr should be 'Input 2 - nested pipeline' but is {componentInputStr}") + +@dsl.component() +def component_a_int(componentInputInt: int = None): + if componentInputInt != 1: + raise ValueError(f"componentInputInt should be 1 but is {componentInputInt}") + +@dsl.component() +def component_b_int(componentInputInt: int = None): + if componentInputInt != 0: + raise ValueError(f"componentInputInt should be 0 but is {componentInputInt}") + +@dsl.component() +def component_a_bool(componentInputBool: bool = None): + if componentInputBool != True: + raise ValueError(f"componentInputBool should be True but is {componentInputBool}") + +@dsl.component() +def component_b_bool(componentInputBool: bool = None): + if componentInputBool != False: + raise ValueError(f"componentInputBool should be False but is {componentInputBool}") + +@dsl.pipeline() +def nested_pipeline( + nestedInputStr1: str = 'Input 1 - nested pipeline', + nestedInputStr2: str = 'Input 2 - nested pipeline', + nestedInputInt1: int = 0, + nestedInputInt2: int = 0, + nestedInputBool1: bool 
= False, + nestedInputBool2: bool = False +): + component_a_str(componentInputStr=nestedInputStr1).set_caching_options(False) + component_b_str(componentInputStr=nestedInputStr2).set_caching_options(False) + + component_a_int(componentInputInt=nestedInputInt1).set_caching_options(False) + component_b_int(componentInputInt=nestedInputInt2).set_caching_options(False) + + component_a_bool(componentInputBool=nestedInputBool1).set_caching_options(False) + component_b_bool(componentInputBool=nestedInputBool2).set_caching_options(False) + + +@dsl.pipeline() +def nested_pipeline_opt_input_child_level(): + # validate that input value overrides default value, and that when input is not provided, default is used. + nested_pipeline( + nestedInputStr1='Input - pipeline', + nestedInputInt1=1, + nestedInputBool1=True).set_caching_options(False) + + + +if __name__ == '__main__': + compiler.Compiler().compile(pipeline_func=nested_pipeline_opt_input_child_level, package_path=__file__.replace('.py', '_compiled.yaml')) + diff --git a/backend/src/v2/driver/test_data/nested_pipeline_opt_input_child_level_compiled.yaml b/backend/src/v2/driver/test_data/nested_pipeline_opt_input_child_level_compiled.yaml new file mode 100644 index 00000000000..8655c2023e9 --- /dev/null +++ b/backend/src/v2/driver/test_data/nested_pipeline_opt_input_child_level_compiled.yaml @@ -0,0 +1,340 @@ +# PIPELINE DEFINITION +# Name: nested-pipeline-opt-input-child-level +components: + comp-component-a-bool: + executorLabel: exec-component-a-bool + inputDefinitions: + parameters: + componentInputBool: + isOptional: true + parameterType: BOOLEAN + comp-component-a-int: + executorLabel: exec-component-a-int + inputDefinitions: + parameters: + componentInputInt: + isOptional: true + parameterType: NUMBER_INTEGER + comp-component-a-str: + executorLabel: exec-component-a-str + inputDefinitions: + parameters: + componentInputStr: + isOptional: true + parameterType: STRING + comp-component-b-bool: + executorLabel: exec-component-b-bool + inputDefinitions: + parameters: + componentInputBool: + isOptional: true + parameterType: BOOLEAN + comp-component-b-int: + executorLabel: exec-component-b-int + inputDefinitions: + parameters: + componentInputInt: + isOptional: true + parameterType: NUMBER_INTEGER + comp-component-b-str: + executorLabel: exec-component-b-str + inputDefinitions: + parameters: + componentInputStr: + isOptional: true + parameterType: STRING + comp-nested-pipeline: + dag: + tasks: + component-a-bool: + cachingOptions: {} + componentRef: + name: comp-component-a-bool + inputs: + parameters: + componentInputBool: + componentInputParameter: nestedInputBool1 + taskInfo: + name: component-a-bool + component-a-int: + cachingOptions: {} + componentRef: + name: comp-component-a-int + inputs: + parameters: + componentInputInt: + componentInputParameter: nestedInputInt1 + taskInfo: + name: component-a-int + component-a-str: + cachingOptions: {} + componentRef: + name: comp-component-a-str + inputs: + parameters: + componentInputStr: + componentInputParameter: nestedInputStr1 + taskInfo: + name: component-a-str + component-b-bool: + cachingOptions: {} + componentRef: + name: comp-component-b-bool + inputs: + parameters: + componentInputBool: + componentInputParameter: nestedInputBool2 + taskInfo: + name: component-b-bool + component-b-int: + cachingOptions: {} + componentRef: + name: comp-component-b-int + inputs: + parameters: + componentInputInt: + componentInputParameter: nestedInputInt2 + taskInfo: + name: component-b-int + component-b-str: 
+ cachingOptions: {} + componentRef: + name: comp-component-b-str + inputs: + parameters: + componentInputStr: + componentInputParameter: nestedInputStr2 + taskInfo: + name: component-b-str + inputDefinitions: + parameters: + nestedInputBool1: + defaultValue: false + isOptional: true + parameterType: BOOLEAN + nestedInputBool2: + defaultValue: false + isOptional: true + parameterType: BOOLEAN + nestedInputInt1: + defaultValue: 0.0 + isOptional: true + parameterType: NUMBER_INTEGER + nestedInputInt2: + defaultValue: 0.0 + isOptional: true + parameterType: NUMBER_INTEGER + nestedInputStr1: + defaultValue: Input 1 - nested pipeline + isOptional: true + parameterType: STRING + nestedInputStr2: + defaultValue: Input 2 - nested pipeline + isOptional: true + parameterType: STRING +deploymentSpec: + executors: + exec-component-a-bool: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - component_a_bool + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef component_a_bool(componentInputBool: bool = None):\n if componentInputBool\ + \ != True:\n raise ValueError(f\"componentInputBool should be True\ + \ but is {componentInputBool}\")\n\n" + image: quay.io/opendatahub/ds-pipelines-ci-executor-image:v1.0 + exec-component-a-int: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - component_a_int + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef component_a_int(componentInputInt: int = None):\n if componentInputInt\ + \ != 1:\n raise ValueError(f\"componentInputInt should be 1 but is\ + \ {componentInputInt}\")\n\n" + image: quay.io/opendatahub/ds-pipelines-ci-executor-image:v1.0 + exec-component-a-str: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - component_a_str + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef component_a_str(componentInputStr: str = None):\n if componentInputStr\ + \ != 'Input - pipeline':\n raise ValueError(f\"componentInputStr\ + \ should be 'Input - pipeline' but is {componentInputStr}\")\n\n" + image: quay.io/opendatahub/ds-pipelines-ci-executor-image:v1.0 + exec-component-b-bool: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - component_b_bool + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef component_b_bool(componentInputBool: bool = None):\n if componentInputBool\ + \ != False:\n raise ValueError(f\"componentInputBool should be False\ + \ but is {componentInputBool}\")\n\n" + image: quay.io/opendatahub/ds-pipelines-ci-executor-image:v1.0 + exec-component-b-int: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - component_b_int + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef component_b_int(componentInputInt: int = None):\n if componentInputInt\ + \ != 0:\n raise ValueError(f\"componentInputInt should be 0 but is\ + \ {componentInputInt}\")\n\n" + image: quay.io/opendatahub/ds-pipelines-ci-executor-image:v1.0 + exec-component-b-str: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - component_b_str + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef component_b_str(componentInputStr: str = None):\n if componentInputStr\ + \ != 'Input 2 - nested pipeline':\n raise ValueError(f\"componentInputStr\ + \ should be 'Input 2 - nested pipeline' but is {componentInputStr}\")\n\n" + image: quay.io/opendatahub/ds-pipelines-ci-executor-image:v1.0 +pipelineInfo: + name: nested-pipeline-opt-input-child-level +root: + dag: + tasks: + nested-pipeline: + cachingOptions: {} + componentRef: + name: comp-nested-pipeline + inputs: + parameters: + nestedInputBool1: + runtimeValue: + constant: true + nestedInputInt1: + runtimeValue: + constant: 1.0 + nestedInputStr1: + runtimeValue: + constant: Input - pipeline + taskInfo: + name: nested-pipeline +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.6 diff --git a/backend/src/v2/driver/test_data/oneof_simple.py b/backend/src/v2/driver/test_data/oneof_simple.py new file mode 100644 index 00000000000..bfc361ae999 --- /dev/null +++ b/backend/src/v2/driver/test_data/oneof_simple.py @@ -0,0 +1,88 @@ +import functools + +from kfp import dsl +from kfp.dsl import ( + Input, + Output, + Artifact, + Dataset, + component +) + +base_image="quay.io/opendatahub/ds-pipelines-ci-executor-image:v1.0" +dsl.component = functools.partial(dsl.component, base_image=base_image) + + +@component +def create_dataset( + condition_to_activate: str, + output_dataset: Output[Dataset], + condition_out: dsl.OutputPath(str)): + with open(output_dataset.path, "w") as f: + f.write(condition_to_activate) + with open(condition_out, 'w') as f: + f.write(condition_to_activate) + +@component +def give_animal(want_animal: str, input_dataset: Input[Dataset], output_animal: Output[Artifact]): + with open(input_dataset.path, "r") as f: + data = f.read() + assert data == "second" + with open(output_animal.path, "w") as f: + f.write(want_animal) + +@component +def analyze_animal(animal_artifact: Input[Artifact], analysis_output: Output[Artifact]): + with open(animal_artifact.path, "r") as f: + data = f.read() + assert data == "dog" + with open(analysis_output.path, "w") as f: + f.write(f'done_analyzing') + +@component +def check_analysis(analysis_input: Input[Artifact]): + with open(analysis_input.path, "r") as f: + data = f.read() + assert data == "done_analyzing" + +@component +def check_animal(animal_input: Input[Artifact]): + with open(animal_input.path, "r") as f: + data = f.read() + assert data == "dog" + +@dsl.pipeline +def secondary_pipeline() -> Artifact: + t0 = create_dataset(condition_to_activate="second") + + with dsl.If('first' == t0.outputs['condition_out']): + give_cat = give_animal(want_animal="cat", input_dataset=t0.outputs['output_dataset']) + with dsl.Elif('second' == t0.outputs['condition_out']): + give_dog = give_animal(want_animal="dog", input_dataset=t0.outputs['output_dataset']) + analyze_dog = analyze_animal(animal_artifact=give_dog.outputs["output_animal"]) + with dsl.Else(): + 
give_mouse = give_animal(want_animal="mouse", input_dataset=t0.outputs['output_dataset'])
+        analyze_mouse = analyze_animal(animal_artifact=give_mouse.outputs["output_animal"])
+
+    one_analysis = dsl.OneOf(
+        analyze_dog.outputs['analysis_output'],
+        analyze_mouse.outputs['analysis_output'],
+    )
+
+    return dsl.OneOf(
+        give_cat.outputs['output_animal'],
+        give_dog.outputs['output_animal'],
+        give_mouse.outputs['output_animal'])
+
+
+@dsl.pipeline
+def primary_pipeline():
+    secondary_pipeline_task = secondary_pipeline()
+    # NOTE: this check_animal step currently fails; the other checks in this test pass.
+    check_animal(animal_input=secondary_pipeline_task.output)
+
+if __name__ == '__main__':
+    from kfp import compiler
+    compiler.Compiler().compile(
+        pipeline_func=primary_pipeline,
+        package_path=__file__.replace('.py', '.yaml'))
diff --git a/backend/src/v2/driver/test_data/oneof_simple.yaml b/backend/src/v2/driver/test_data/oneof_simple.yaml
new file mode 100644
index 00000000000..bf6c1606bde
--- /dev/null
+++ b/backend/src/v2/driver/test_data/oneof_simple.yaml
@@ -0,0 +1,614 @@
+# PIPELINE DEFINITION
+# Name: primary-pipeline
+components:
+  comp-analyze-animal:
+    executorLabel: exec-analyze-animal
+    inputDefinitions:
+      artifacts:
+        animal_artifact:
+          artifactType:
+            schemaTitle: system.Artifact
+            schemaVersion: 0.0.1
+    outputDefinitions:
+      artifacts:
+        analysis_output:
+          artifactType:
+            schemaTitle: system.Artifact
+            schemaVersion: 0.0.1
+  comp-analyze-animal-2:
+    executorLabel: exec-analyze-animal-2
+    inputDefinitions:
+      artifacts:
+        animal_artifact:
+          artifactType:
+            schemaTitle: system.Artifact
+            schemaVersion: 0.0.1
+    outputDefinitions:
+      artifacts:
+        analysis_output:
+          artifactType:
+            schemaTitle: system.Artifact
+            schemaVersion: 0.0.1
+  comp-check-animal:
+    executorLabel: exec-check-animal
+    inputDefinitions:
+      artifacts:
+        animal_input:
+          artifactType:
+            schemaTitle: system.Artifact
+            schemaVersion: 0.0.1
+  comp-condition-2:
+    dag:
+      outputs:
+        artifacts:
+          pipelinechannel--give-animal-output_animal:
+            artifactSelectors:
+            - outputArtifactKey: output_animal
+              producerSubtask: give-animal
+      tasks:
+        give-animal:
+          cachingOptions: {}
+          componentRef:
+            name: comp-give-animal
+          inputs:
+            artifacts:
+              input_dataset:
+                componentInputArtifact: pipelinechannel--create-dataset-output_dataset
+            parameters:
+              want_animal:
+                runtimeValue:
+                  constant: cat
+          taskInfo:
+            name: give-animal
+    inputDefinitions:
+      artifacts:
+        pipelinechannel--create-dataset-output_dataset:
+          artifactType:
+            schemaTitle: system.Dataset
+            schemaVersion: 0.0.1
+      parameters:
+        pipelinechannel--create-dataset-condition_out:
+          parameterType: STRING
+    outputDefinitions:
+      artifacts:
+        pipelinechannel--give-animal-output_animal:
+          artifactType:
+            schemaTitle: system.Artifact
+            schemaVersion: 0.0.1
+  comp-condition-3:
+    dag:
+      outputs:
+        artifacts:
+          pipelinechannel--give-animal-2-output_animal:
+            artifactSelectors:
+            - outputArtifactKey: output_animal
+              producerSubtask: give-animal-2
+      tasks:
+        analyze-animal:
+          cachingOptions: {}
+          componentRef:
+            name: comp-analyze-animal
+          dependentTasks:
+          - give-animal-2
+          inputs:
+            artifacts:
+              animal_artifact:
+                taskOutputArtifact:
+                  outputArtifactKey: output_animal
+                  producerTask: give-animal-2
+          taskInfo:
+            name: analyze-animal
+        give-animal-2:
+          cachingOptions: {}
+          componentRef:
+            name: comp-give-animal-2
+          inputs:
+            artifacts:
+              input_dataset:
+                componentInputArtifact: pipelinechannel--create-dataset-output_dataset
+            parameters:
+              want_animal:
+                runtimeValue:
+                  constant: dog
+          taskInfo:
+            name: give-animal-2
+    
inputDefinitions: + artifacts: + pipelinechannel--create-dataset-output_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + parameters: + pipelinechannel--create-dataset-condition_out: + parameterType: STRING + outputDefinitions: + artifacts: + pipelinechannel--give-animal-2-output_animal: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + comp-condition-4: + dag: + outputs: + artifacts: + pipelinechannel--give-animal-3-output_animal: + artifactSelectors: + - outputArtifactKey: output_animal + producerSubtask: give-animal-3 + tasks: + analyze-animal-2: + cachingOptions: {} + componentRef: + name: comp-analyze-animal-2 + dependentTasks: + - give-animal-3 + inputs: + artifacts: + animal_artifact: + taskOutputArtifact: + outputArtifactKey: output_animal + producerTask: give-animal-3 + taskInfo: + name: analyze-animal-2 + give-animal-3: + cachingOptions: {} + componentRef: + name: comp-give-animal-3 + inputs: + artifacts: + input_dataset: + componentInputArtifact: pipelinechannel--create-dataset-output_dataset + parameters: + want_animal: + runtimeValue: + constant: mouse + taskInfo: + name: give-animal-3 + inputDefinitions: + artifacts: + pipelinechannel--create-dataset-output_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + parameters: + pipelinechannel--create-dataset-condition_out: + parameterType: STRING + outputDefinitions: + artifacts: + pipelinechannel--give-animal-3-output_animal: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + comp-condition-branches-1: + dag: + outputs: + artifacts: + pipelinechannel--condition-branches-1-oneof-2: + artifactSelectors: + - outputArtifactKey: pipelinechannel--give-animal-output_animal + producerSubtask: condition-2 + - outputArtifactKey: pipelinechannel--give-animal-2-output_animal + producerSubtask: condition-3 + - outputArtifactKey: pipelinechannel--give-animal-3-output_animal + producerSubtask: condition-4 + tasks: + condition-2: + componentRef: + name: comp-condition-2 + inputs: + artifacts: + pipelinechannel--create-dataset-output_dataset: + componentInputArtifact: pipelinechannel--create-dataset-output_dataset + parameters: + pipelinechannel--create-dataset-condition_out: + componentInputParameter: pipelinechannel--create-dataset-condition_out + taskInfo: + name: condition-2 + triggerPolicy: + condition: inputs.parameter_values['pipelinechannel--create-dataset-condition_out'] + == 'first' + condition-3: + componentRef: + name: comp-condition-3 + inputs: + artifacts: + pipelinechannel--create-dataset-output_dataset: + componentInputArtifact: pipelinechannel--create-dataset-output_dataset + parameters: + pipelinechannel--create-dataset-condition_out: + componentInputParameter: pipelinechannel--create-dataset-condition_out + taskInfo: + name: condition-3 + triggerPolicy: + condition: '!(inputs.parameter_values[''pipelinechannel--create-dataset-condition_out''] + == ''first'') && inputs.parameter_values[''pipelinechannel--create-dataset-condition_out''] + == ''second''' + condition-4: + componentRef: + name: comp-condition-4 + inputs: + artifacts: + pipelinechannel--create-dataset-output_dataset: + componentInputArtifact: pipelinechannel--create-dataset-output_dataset + parameters: + pipelinechannel--create-dataset-condition_out: + componentInputParameter: pipelinechannel--create-dataset-condition_out + taskInfo: + name: condition-4 + triggerPolicy: + condition: '!(inputs.parameter_values[''pipelinechannel--create-dataset-condition_out''] + == 
''first'') && !(inputs.parameter_values[''pipelinechannel--create-dataset-condition_out''] + == ''second'')' + inputDefinitions: + artifacts: + pipelinechannel--create-dataset-output_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + parameters: + pipelinechannel--create-dataset-condition_out: + parameterType: STRING + outputDefinitions: + artifacts: + pipelinechannel--condition-branches-1-oneof-2: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + comp-create-dataset: + executorLabel: exec-create-dataset + inputDefinitions: + parameters: + condition_to_activate: + parameterType: STRING + outputDefinitions: + artifacts: + output_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + parameters: + condition_out: + parameterType: STRING + comp-give-animal: + executorLabel: exec-give-animal + inputDefinitions: + artifacts: + input_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + parameters: + want_animal: + parameterType: STRING + outputDefinitions: + artifacts: + output_animal: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + comp-give-animal-2: + executorLabel: exec-give-animal-2 + inputDefinitions: + artifacts: + input_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + parameters: + want_animal: + parameterType: STRING + outputDefinitions: + artifacts: + output_animal: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + comp-give-animal-3: + executorLabel: exec-give-animal-3 + inputDefinitions: + artifacts: + input_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + parameters: + want_animal: + parameterType: STRING + outputDefinitions: + artifacts: + output_animal: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + comp-secondary-pipeline: + dag: + outputs: + artifacts: + Output: + artifactSelectors: + - outputArtifactKey: pipelinechannel--condition-branches-1-oneof-2 + producerSubtask: condition-branches-1 + tasks: + condition-branches-1: + componentRef: + name: comp-condition-branches-1 + dependentTasks: + - create-dataset + inputs: + artifacts: + pipelinechannel--create-dataset-output_dataset: + taskOutputArtifact: + outputArtifactKey: output_dataset + producerTask: create-dataset + parameters: + pipelinechannel--create-dataset-condition_out: + taskOutputParameter: + outputParameterKey: condition_out + producerTask: create-dataset + taskInfo: + name: condition-branches-1 + create-dataset: + cachingOptions: {} + componentRef: + name: comp-create-dataset + inputs: + parameters: + condition_to_activate: + runtimeValue: + constant: second + taskInfo: + name: create-dataset + outputDefinitions: + artifacts: + Output: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 +deploymentSpec: + executors: + exec-analyze-animal: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - analyze_animal + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef analyze_animal(animal_artifact: Input[Artifact], analysis_output:\ + \ Output[Artifact]):\n with open(animal_artifact.path, \"r\") as f:\n\ + \ data = f.read()\n assert data == \"dog\"\n with open(analysis_output.path,\ + \ \"w\") as f:\n f.write(f'done_analyzing')\n\n" + image: python:3.11 + exec-analyze-animal-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - analyze_animal + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef analyze_animal(animal_artifact: Input[Artifact], analysis_output:\ + \ Output[Artifact]):\n with open(animal_artifact.path, \"r\") as f:\n\ + \ data = f.read()\n assert data == \"dog\"\n with open(analysis_output.path,\ + \ \"w\") as f:\n f.write(f'done_analyzing')\n\n" + image: python:3.11 + exec-check-animal: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - check_animal + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef check_animal(animal_input: Input[Artifact]):\n with open(animal_input.path,\ + \ \"r\") as f:\n data = f.read()\n assert data == \"dog\"\n\n" + image: python:3.11 + exec-create-dataset: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - create_dataset + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef create_dataset(\n condition_to_activate: str,\n \ + \ output_dataset: Output[Dataset],\n condition_out: dsl.OutputPath(str)):\n\ + \ with open(output_dataset.path, \"w\") as f:\n f.write(condition_to_activate)\n\ + \ with open(condition_out, 'w') as f:\n f.write(condition_to_activate)\n\ + \n" + image: python:3.11 + exec-give-animal: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - give_animal + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef give_animal(want_animal: str, input_dataset: Input[Dataset],\ + \ output_animal: Output[Artifact]):\n with open(input_dataset.path, \"\ + r\") as f:\n data = f.read()\n assert data == \"second\"\n \ + \ with open(output_animal.path, \"w\") as f:\n f.write(want_animal)\n\ + \n" + image: python:3.11 + exec-give-animal-2: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - give_animal + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef give_animal(want_animal: str, input_dataset: Input[Dataset],\ + \ output_animal: Output[Artifact]):\n with open(input_dataset.path, \"\ + r\") as f:\n data = f.read()\n assert data == \"second\"\n \ + \ with open(output_animal.path, \"w\") as f:\n f.write(want_animal)\n\ + \n" + image: python:3.11 + exec-give-animal-3: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - give_animal + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef give_animal(want_animal: str, input_dataset: Input[Dataset],\ + \ output_animal: Output[Artifact]):\n with open(input_dataset.path, \"\ + r\") as f:\n data = f.read()\n assert data == \"second\"\n \ + \ with open(output_animal.path, \"w\") as f:\n f.write(want_animal)\n\ + \n" + image: python:3.11 +pipelineInfo: + name: primary-pipeline +root: + dag: + tasks: + check-animal: + cachingOptions: {} + componentRef: + name: comp-check-animal + dependentTasks: + - secondary-pipeline + inputs: + artifacts: + animal_input: + taskOutputArtifact: + outputArtifactKey: Output + producerTask: secondary-pipeline + taskInfo: + name: check-animal + secondary-pipeline: + cachingOptions: {} + componentRef: + name: comp-secondary-pipeline + taskInfo: + name: secondary-pipeline +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.6 diff --git a/backend/src/v2/driver/test_data/pipeline_with_input_status_state.py b/backend/src/v2/driver/test_data/pipeline_with_input_status_state.py new file mode 100644 index 00000000000..2ac5c9a888b --- /dev/null +++ b/backend/src/v2/driver/test_data/pipeline_with_input_status_state.py @@ -0,0 +1,26 @@ + +from kfp import dsl + + +@dsl.component +def echo_state(status: dsl.PipelineTaskFinalStatus): + print(status) + assert(status.state == 'SUCCEEDED') + assert('status-state-pipeline' in status.pipeline_job_resource_name) + assert(status.pipeline_task_name == 'exit-handler-1') + +@dsl.component +def some_task(): + print('Executing some_task()...') + +@dsl.pipeline +def status_state_pipeline(): + echo_state_task = echo_state() + with dsl.ExitHandler(exit_task=echo_state_task): + some_task() + +if __name__ == '__main__': + from kfp import compiler + compiler.Compiler().compile( + pipeline_func=status_state_pipeline, + package_path=__file__.replace('.py', '.yaml')) diff --git a/backend/src/v2/driver/test_data/pipeline_with_input_status_state.yaml b/backend/src/v2/driver/test_data/pipeline_with_input_status_state.yaml new file mode 100644 index 00000000000..9f1556f0e21 --- /dev/null +++ b/backend/src/v2/driver/test_data/pipeline_with_input_status_state.yaml @@ -0,0 +1,111 @@ +# PIPELINE DEFINITION +# Name: status-state-pipeline +components: + comp-echo-state: + executorLabel: exec-echo-state + inputDefinitions: + parameters: + status: + isOptional: true + parameterType: TASK_FINAL_STATUS + comp-exit-handler-1: + dag: + tasks: + some-task: + cachingOptions: {} + componentRef: + name: comp-some-task + taskInfo: + name: some-task + comp-some-task: + executorLabel: exec-some-task +deploymentSpec: + executors: + exec-echo-state: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - echo_state + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef echo_state(status: dsl.PipelineTaskFinalStatus):\n print(status)\n\ + \ assert(status.state == 'SUCCEEDED')\n assert('status-state-pipeline'\ + \ in status.pipeline_job_resource_name)\n assert(status.pipeline_task_name\ + \ == 'exit-handler-1')\n #TODO: Add assert statements to validate status.error_code\ + \ and status.error_message values once those fields have been implemented.\n\ + \n" + image: python:3.11 + exec-some-task: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - some_task + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef some_task():\n print('Executing some_task()...')\n\n" + image: python:3.11 +pipelineInfo: + name: status-state-pipeline +root: + dag: + tasks: + echo-state: + cachingOptions: {} + componentRef: + name: comp-echo-state + dependentTasks: + - exit-handler-1 + inputs: + parameters: + status: + taskFinalStatus: + producerTask: exit-handler-1 + taskInfo: + name: echo-state + triggerPolicy: + strategy: ALL_UPSTREAM_TASKS_COMPLETED + exit-handler-1: + componentRef: + name: comp-exit-handler-1 + taskInfo: + name: exit-handler-1 +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.6 diff --git a/backend/src/v2/driver/test_data/taskOutputArtifact_test.py b/backend/src/v2/driver/test_data/taskOutputArtifact_test.py new file mode 100644 index 00000000000..e050b2b8973 --- /dev/null +++ b/backend/src/v2/driver/test_data/taskOutputArtifact_test.py @@ -0,0 +1,50 @@ +import functools + +from kubernetes import client +import kfp +from kfp import dsl +from kfp.dsl import ( + Input, + Output, + Artifact, + Dataset, + component +) + +base_image="quay.io/opendatahub/ds-pipelines-ci-executor-image:v1.0" +dsl.component = functools.partial(dsl.component, base_image=base_image) + +@component +def create_dataset(output_dataset: Output[Dataset]): + with open(output_dataset.path, "w") as f: + f.write('hurricane') + output_dataset.metadata["category"] = 5 + output_dataset.metadata["description"] = "A simple dataset" + +@component +def process_dataset(input_dataset: Input[Dataset], output_artifact: Output[Artifact]): + with open(input_dataset.path, "r") as f: + data = f.read() + assert data == "hurricane" + with open(output_artifact.path, "w") as f: + 
f.write(f'very_bad') + +@component +def analyze_artifact(data_input: Input[Artifact], output_artifact: Output[Artifact]): + with open(data_input.path, "r") as f: + data = f.read() + assert data == "very_bad" + with open(output_artifact.path, "w") as f: + f.write(f'done_analyzing') + +@dsl.pipeline +def primary_pipeline(): + dataset_op = create_dataset() + processed = process_dataset(input_dataset=dataset_op.outputs["output_dataset"]) + analyze_artifact(data_input=processed.outputs["output_artifact"]) + +if __name__ == '__main__': + from kfp import compiler + compiler.Compiler().compile( + pipeline_func=primary_pipeline, + package_path=__file__.replace('.py', '.yaml')) diff --git a/backend/src/v2/driver/test_data/taskOutputArtifact_test.yaml b/backend/src/v2/driver/test_data/taskOutputArtifact_test.yaml new file mode 100644 index 00000000000..f4057de6de0 --- /dev/null +++ b/backend/src/v2/driver/test_data/taskOutputArtifact_test.yaml @@ -0,0 +1,175 @@ +# PIPELINE DEFINITION +# Name: primary-pipeline +components: + comp-analyze-artifact: + executorLabel: exec-analyze-artifact + inputDefinitions: + artifacts: + data_input: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + outputDefinitions: + artifacts: + output_artifact: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 + comp-create-dataset: + executorLabel: exec-create-dataset + outputDefinitions: + artifacts: + output_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + comp-process-dataset: + executorLabel: exec-process-dataset + inputDefinitions: + artifacts: + input_dataset: + artifactType: + schemaTitle: system.Dataset + schemaVersion: 0.0.1 + outputDefinitions: + artifacts: + output_artifact: + artifactType: + schemaTitle: system.Artifact + schemaVersion: 0.0.1 +deploymentSpec: + executors: + exec-analyze-artifact: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - analyze_artifact + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef analyze_artifact(data_input: Input[Artifact], output_artifact:\ + \ Output[Artifact]):\n with open(data_input.path, \"r\") as f:\n \ + \ data = f.read()\n assert data == \"very_bad\"\n with open(output_artifact.path,\ + \ \"w\") as f:\n f.write(f'done_analyzing')\n\n" + image: python:3.11 + exec-create-dataset: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - create_dataset + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef create_dataset(output_dataset: Output[Dataset]):\n with open(output_dataset.path,\ + \ \"w\") as f:\n f.write('hurricane')\n output_dataset.metadata[\"\ + category\"] = 5\n output_dataset.metadata[\"description\"] = \"A simple\ + \ dataset\"\n\n" + image: python:3.11 + exec-process-dataset: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - process_dataset + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef process_dataset(input_dataset: Input[Dataset], output_artifact:\ + \ Output[Artifact]):\n with open(input_dataset.path, \"r\") as f:\n \ + \ data = f.read()\n assert data == \"hurricane\"\n with open(output_artifact.path,\ + \ \"w\") as f:\n f.write(f'very_bad')\n\n" + image: python:3.11 +pipelineInfo: + name: primary-pipeline +root: + dag: + tasks: + analyze-artifact: + cachingOptions: {} + componentRef: + name: comp-analyze-artifact + dependentTasks: + - process-dataset + inputs: + artifacts: + data_input: + taskOutputArtifact: + outputArtifactKey: output_artifact + producerTask: process-dataset + taskInfo: + name: analyze-artifact + create-dataset: + cachingOptions: {} + componentRef: + name: comp-create-dataset + taskInfo: + name: create-dataset + process-dataset: + cachingOptions: {} + componentRef: + name: comp-process-dataset + dependentTasks: + - create-dataset + inputs: + artifacts: + input_dataset: + taskOutputArtifact: + outputArtifactKey: output_dataset + producerTask: create-dataset + taskInfo: + name: process-dataset +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.6 diff --git a/backend/src/v2/driver/test_data/taskOutputParameter_test.py b/backend/src/v2/driver/test_data/taskOutputParameter_test.py new file mode 100644 index 00000000000..ae4b1086362 --- /dev/null +++ b/backend/src/v2/driver/test_data/taskOutputParameter_test.py @@ -0,0 +1,45 @@ +import functools + +from kubernetes import client +import kfp +from kfp import dsl +from kfp.dsl import ( + Input, + Output, + Artifact, + Dataset, + component, OutputPath +) + +base_image="quay.io/opendatahub/ds-pipelines-ci-executor-image:v1.0" +dsl.component = functools.partial(dsl.component, base_image=base_image) + +@component +def create_dataset(output_parameter_path: OutputPath(str)): + with 
open(output_parameter_path, "w") as f: + f.write('hurricane') + +@component +def process_dataset(input_dataset: str, output_int: OutputPath(int)): + assert input_dataset == "hurricane" + with open(output_int, "w") as f: + f.write("100") + +@component +def analyze_artifact(data_input: int, output_opinion: OutputPath(bool)): + assert data_input == 100 + with open(output_opinion, "w") as f: + f.write(str(True)) + +@dsl.pipeline +def primary_pipeline(): + create_dataset_task = create_dataset() + processed_task = process_dataset(input_dataset=create_dataset_task.outputs["output_parameter_path"]) + analyze_artifact(data_input=processed_task.outputs["output_int"]) + +if __name__ == '__main__': + from kfp import compiler + + compiler.Compiler().compile( + pipeline_func=primary_pipeline, + package_path=__file__.replace('.py', '.yaml')) diff --git a/backend/src/v2/driver/test_data/taskOutputParameter_test.yaml b/backend/src/v2/driver/test_data/taskOutputParameter_test.yaml new file mode 100644 index 00000000000..03af1eee297 --- /dev/null +++ b/backend/src/v2/driver/test_data/taskOutputParameter_test.yaml @@ -0,0 +1,162 @@ +# PIPELINE DEFINITION +# Name: primary-pipeline +components: + comp-analyze-artifact: + executorLabel: exec-analyze-artifact + inputDefinitions: + parameters: + data_input: + parameterType: NUMBER_INTEGER + outputDefinitions: + parameters: + output_opinion: + parameterType: BOOLEAN + comp-create-dataset: + executorLabel: exec-create-dataset + outputDefinitions: + parameters: + output_parameter_path: + parameterType: STRING + comp-process-dataset: + executorLabel: exec-process-dataset + inputDefinitions: + parameters: + input_dataset: + parameterType: STRING + outputDefinitions: + parameters: + output_int: + parameterType: NUMBER_INTEGER +deploymentSpec: + executors: + exec-analyze-artifact: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - analyze_artifact + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef analyze_artifact(data_input: int, output_opinion: OutputPath(bool)):\n\ + \ assert data_input == 100\n with open(output_opinion, \"w\") as f:\n\ + \ f.write(str(True))\n\n" + image: python:3.11 + exec-create-dataset: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - create_dataset + command: + - sh + - -c + - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef create_dataset(output_parameter_path: OutputPath(str)):\n \ + \ with open(output_parameter_path, \"w\") as f:\n f.write('hurricane')\n\ + \n" + image: python:3.11 + exec-process-dataset: + container: + args: + - --executor_input + - '{{$}}' + - --function_to_execute + - process_dataset + command: + - sh + - -c + - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ + \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ + \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.6'\ + \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"\ + $0\" \"$@\"\n" + - sh + - -ec + - 'program_path=$(mktemp -d) + + + printf "%s" "$0" > "$program_path/ephemeral_component.py" + + _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" + + ' + - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ + \ *\n\ndef process_dataset(input_dataset: str, output_int: OutputPath(int)):\n\ + \ assert input_dataset == \"hurricane\"\n with open(output_int, \"\ + w\") as f:\n f.write(\"100\")\n\n" + image: python:3.11 +pipelineInfo: + name: primary-pipeline +root: + dag: + tasks: + analyze-artifact: + cachingOptions: {} + componentRef: + name: comp-analyze-artifact + dependentTasks: + - process-dataset + inputs: + parameters: + data_input: + taskOutputParameter: + outputParameterKey: output_int + producerTask: process-dataset + taskInfo: + name: analyze-artifact + create-dataset: + cachingOptions: {} + componentRef: + name: comp-create-dataset + taskInfo: + name: create-dataset + process-dataset: + cachingOptions: {} + componentRef: + name: comp-process-dataset + dependentTasks: + - create-dataset + inputs: + parameters: + input_dataset: + taskOutputParameter: + outputParameterKey: output_parameter_path + producerTask: create-dataset + taskInfo: + name: process-dataset +schemaVersion: 2.1.0 +sdkVersion: kfp-2.14.6 diff --git a/backend/src/v2/driver/util.go b/backend/src/v2/driver/util.go index 33da7800713..42b940aa361 100644 --- a/backend/src/v2/driver/util.go +++ b/backend/src/v2/driver/util.go @@ -15,94 +15,148 @@ package driver import ( + "context" "fmt" - "regexp" - "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" - "google.golang.org/protobuf/types/known/structpb" + apiV2beta1 "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" + "github.com/kubeflow/pipelines/backend/src/v2/apiclient/kfpapi" + "github.com/kubeflow/pipelines/backend/src/v2/driver/common" + "github.com/kubeflow/pipelines/backend/src/v2/driver/resolver" ) -// inputPipelineChannelPattern define a regex pattern to match the content within single quotes -// example input channel looks like "{{$.inputs.parameters['pipelinechannel--val']}}" -const inputPipelineChannelPattern = 
`\$.inputs.parameters\['(.+?)'\]` - -func isInputParameterChannel(inputChannel string) bool { - re := regexp.MustCompile(inputPipelineChannelPattern) - match := re.FindStringSubmatch(inputChannel) - if len(match) == 2 { - return true - } else { - // if len(match) > 2, then this is still incorrect because - // inputChannel should contain only one parameter channel input - return false +// validateRootDAG contains validation for root DAG driver options. +func validateRootDAG(opts common.Options) (err error) { + defer func() { + if err != nil { + err = fmt.Errorf("invalid root DAG driver args: %w", err) + } + }() + if opts.PipelineName == "" { + return fmt.Errorf("pipeline name is required") + } + if opts.Run.GetRunId() == "" { + return fmt.Errorf("KFP run ID is required") + } + if opts.Component == nil { + return fmt.Errorf("component spec is required") + } + if opts.RuntimeConfig == nil { + return fmt.Errorf("runtime config is required") + } + if opts.Namespace == "" { + return fmt.Errorf("namespace is required") } + if opts.Task.GetTaskInfo().GetName() != "" { + return fmt.Errorf("task spec is unnecessary") + } + if opts.ParentTask != nil && opts.ParentTask.GetTaskId() == "" { + return fmt.Errorf("parent task is required") + } + if opts.Container != nil { + return fmt.Errorf("container spec is unnecessary") + } + if opts.IterationIndex >= 0 { + return fmt.Errorf("iteration index is unnecessary") + } + return nil } -// extractInputParameterFromChannel takes an inputChannel that adheres to -// inputPipelineChannelPattern and extracts the channel parameter name. -// For example given an input channel of the form "{{$.inputs.parameters['pipelinechannel--val']}}" -// the channel parameter name "pipelinechannel--val" is returned. -func extractInputParameterFromChannel(inputChannel string) (string, error) { - re := regexp.MustCompile(inputPipelineChannelPattern) - match := re.FindStringSubmatch(inputChannel) - if len(match) > 1 { - extractedValue := match[1] - return extractedValue, nil - } else { - return "", fmt.Errorf("failed to extract input parameter from channel: %s", inputChannel) +// validateDAG validates non-root DAG options. 
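+// As an illustrative sketch (hypothetical values, not asserted by this change):
+// a root DAG driver is invoked with a runtime config and no task spec, while a
+// nested DAG driver carries a task spec and no runtime config; both also need
+// the run, component, and namespace details checked by these validators.
+//
+//	_ = validateRootDAG(common.Options{PipelineName: "p", RuntimeConfig: cfg /* ... */})
+//	_ = validateDAG(common.Options{PipelineName: "p", Task: taskSpec /* ... */})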
+func validateDAG(opts common.Options) (err error) { + defer func() { + if err != nil { + err = fmt.Errorf("invalid DAG driver args: %w", err) + } + }() + if opts.Container != nil { + return fmt.Errorf("container spec is unnecessary") } + return validateNonRoot(opts) } -// inputParamConstant convert and return value as a RuntimeValue -func inputParamConstant(value string) *pipelinespec.TaskInputsSpec_InputParameterSpec { - return &pipelinespec.TaskInputsSpec_InputParameterSpec{ - Kind: &pipelinespec.TaskInputsSpec_InputParameterSpec_RuntimeValue{ - RuntimeValue: &pipelinespec.ValueOrRuntimeParameter{ - Value: &pipelinespec.ValueOrRuntimeParameter_Constant{ - Constant: structpb.NewStringValue(value), - }, - }, - }, +func validateNonRoot(opts common.Options) error { + if opts.PipelineName == "" { + return fmt.Errorf("pipeline name is required") } + if opts.Run.GetRunId() == "" { + return fmt.Errorf("KFP run ID is required") + } + if opts.Component == nil { + return fmt.Errorf("component spec is required") + } + if opts.Task.GetTaskInfo().GetName() == "" { + return fmt.Errorf("task spec is required") + } + if opts.RuntimeConfig != nil { + return fmt.Errorf("runtime config is unnecessary") + } + if opts.ParentTask != nil && opts.ParentTask.GetTaskId() == "" { + return fmt.Errorf("parent task is required") + } + if opts.ParentTask.GetScopePath() == nil { + return fmt.Errorf("parent task scope path is required for DAG") + } + return nil } -// inputParamComponent convert and return value as a ComponentInputParameter -func inputParamComponent(value string) *pipelinespec.TaskInputsSpec_InputParameterSpec { - return &pipelinespec.TaskInputsSpec_InputParameterSpec{ - Kind: &pipelinespec.TaskInputsSpec_InputParameterSpec_ComponentInputParameter{ - ComponentInputParameter: value, - }, +// handleInputTaskParametersCreation creates a new PipelineTaskDetail_InputOutputs_IOParameter +// for each parameter in the executor input. +func handleInputTaskParametersCreation( + parameterMetadata []resolver.ParameterMetadata, + task *apiV2beta1.PipelineTaskDetail, +) (*apiV2beta1.PipelineTaskDetail, error) { + if task == nil { + return nil, fmt.Errorf("task is nil") + } + if task.Inputs == nil { + task.Inputs = &apiV2beta1.PipelineTaskDetail_InputOutputs{ + Parameters: []*apiV2beta1.PipelineTaskDetail_InputOutputs_IOParameter{}, + } + } else if task.Inputs.Parameters == nil { + task.Inputs.Parameters = []*apiV2beta1.PipelineTaskDetail_InputOutputs_IOParameter{} } -} -// inputParamTaskOutput convert and return producerTask & outputParamKey -// as a TaskOutputParameter. -func inputParamTaskOutput(producerTask, outputParamKey string) *pipelinespec.TaskInputsSpec_InputParameterSpec { - return &pipelinespec.TaskInputsSpec_InputParameterSpec{ - Kind: &pipelinespec.TaskInputsSpec_InputParameterSpec_TaskOutputParameter{ - TaskOutputParameter: &pipelinespec.TaskInputsSpec_InputParameterSpec_TaskOutputParameterSpec{ - ProducerTask: producerTask, - OutputParameterKey: outputParamKey, - }, - }, + for _, pm := range parameterMetadata { + parameterNew := pm.ParameterIO + task.Inputs.Parameters = append(task.Inputs.Parameters, parameterNew) } + return task, nil } -// Get iteration items from a structpb.Value. 
-// Return value may be
-// * a list of JSON serializable structs
-// * a list of structpb.Value
-func getItems(value *structpb.Value) (items []*structpb.Value, err error) {
-	switch v := value.GetKind().(type) {
-	case *structpb.Value_ListValue:
-		return v.ListValue.GetValues(), nil
-	case *structpb.Value_StringValue:
-		listValue := structpb.Value{}
-		if err = listValue.UnmarshalJSON([]byte(v.StringValue)); err != nil {
-			return nil, err
+// handleInputTaskArtifactsCreation creates a new ArtifactTask for each input artifact.
+// The ArtifactTasks are created as input artifacts, which allows the KFP backend to
+// list input artifacts for this task. Parameters do not require this additional overhead
+// because parameters are stored in the task itself.
+func handleInputTaskArtifactsCreation(
+	ctx context.Context,
+	opts common.Options,
+	artifactMetadata []resolver.ArtifactMetadata,
+	task *apiV2beta1.PipelineTaskDetail,
+	kfpAPI kfpapi.API,
+) error {
+	var artifactTasks []*apiV2beta1.ArtifactTask
+	for _, am := range artifactMetadata {
+		for _, artifact := range am.ArtifactIO.Artifacts {
+			if artifact.ArtifactId == "" {
+				return fmt.Errorf("artifact id is required")
+			}
+			at := &apiV2beta1.ArtifactTask{
+				ArtifactId: artifact.ArtifactId,
+				RunId:      opts.Run.GetRunId(),
+				TaskId:     task.TaskId,
+				Type:       am.ArtifactIO.Type,
+				Producer:   am.ArtifactIO.Producer,
+				Key:        am.ArtifactIO.ArtifactKey,
+			}
+			artifactTasks = append(artifactTasks, at)
+		}
+	}
+	if len(artifactTasks) > 0 {
+		request := apiV2beta1.CreateArtifactTasksBulkRequest{ArtifactTasks: artifactTasks}
+		_, err := kfpAPI.CreateArtifactTasks(ctx, &request)
+		if err != nil {
+			return err
 		}
-		return listValue.GetListValue().GetValues(), nil
-	default:
-		return nil, fmt.Errorf("value of type %T cannot be iterated", v)
 	}
+	return nil
 }
diff --git a/backend/src/v2/driver/util_test.go b/backend/src/v2/driver/util_test.go
index 4fd5fe65c92..3c993f370ee 100644
--- a/backend/src/v2/driver/util_test.go
+++ b/backend/src/v2/driver/util_test.go
@@ -18,89 +18,11 @@ import (
 	"testing"
 
 	"github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec"
+	"github.com/kubeflow/pipelines/backend/src/v2/driver/resolver"
 	"github.com/stretchr/testify/assert"
 	structpb "google.golang.org/protobuf/types/known/structpb"
 )
 
-func Test_isInputParameterChannel(t *testing.T) {
-	tests := []struct {
-		name    string
-		input   string
-		isValid bool
-	}{
-		{
-			name:    "wellformed pipeline channel should produce no errors",
-			input:   "{{$.inputs.parameters['pipelinechannel--someParameterName']}}",
-			isValid: true,
-		},
-		{
-			name:    "pipeline channel index should have quotes",
-			input:   "{{$.inputs.parameters[pipelinechannel--someParameterName]}}",
-			isValid: false,
-		},
-		{
-			name:    "plain text as pipelinechannel of parameter type is invalid",
-			input:   "randomtext",
-			isValid: false,
-		},
-		{
-			name:    "inputs should be prefixed with $.",
-			input:   "{{inputs.parameters['pipelinechannel--someParameterName']}}",
-			isValid: false,
-		},
-	}
-
-	for _, test := range tests {
-		t.Run(test.name, func(t *testing.T) {
-			assert.Equal(t, isInputParameterChannel(test.input), test.isValid)
-		})
-	}
-}
-
-func Test_extractInputParameterFromChannel(t *testing.T) {
-	tests := []struct {
-		name     string
-		input    string
-		expected string
-		wantErr  bool
-	}{
-		{
-			name:     "standard parameter pipeline channel input",
-			input:    "{{$.inputs.parameters['pipelinechannel--someParameterName']}}",
-			expected: "pipelinechannel--someParameterName",
-			wantErr:  false,
-		},
-		{
-			name:     "a more complex parameter pipeline channel 
input", - input: "{{$.inputs.parameters['pipelinechannel--somePara-me_terName']}}", - expected: "pipelinechannel--somePara-me_terName", - wantErr: false, - }, - { - name: "invalid input should return err", - input: "invalidvalue", - wantErr: true, - }, - { - name: "invalid input should return err 2", - input: "pipelinechannel--somePara-me_terName", - wantErr: true, - }, - } - - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - actual, err := extractInputParameterFromChannel(test.input) - if test.wantErr { - assert.NotNil(t, err) - } else { - assert.NoError(t, err) - assert.Equal(t, actual, test.expected) - } - }) - } -} - func Test_resolvePodSpecRuntimeParameter(t *testing.T) { tests := []struct { name string @@ -148,7 +70,7 @@ func Test_resolvePodSpecRuntimeParameter(t *testing.T) { for _, test := range tests { t.Run(test.name, func(t *testing.T) { - actual, err := resolvePodSpecInputRuntimeParameter(test.input, test.executorInput) + actual, err := resolver.ResolveParameterOrPipelineChannel(test.input, test.executorInput) if test.wantErr { assert.NotNil(t, err) } else { diff --git a/backend/src/v2/expression/expression.go b/backend/src/v2/expression/expression.go index 440a58c5a3d..c79b70f6df6 100644 --- a/backend/src/v2/expression/expression.go +++ b/backend/src/v2/expression/expression.go @@ -24,7 +24,7 @@ import ( "github.com/google/cel-go/common/types/ref" "github.com/google/cel-go/interpreter/functions" "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" - "github.com/kubeflow/pipelines/backend/src/v2/metadata" + "github.com/kubeflow/pipelines/backend/src/common/util" exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" "google.golang.org/protobuf/proto" "google.golang.org/protobuf/types/known/structpb" @@ -95,7 +95,7 @@ func (e *Expr) Select(v *structpb.Value, expr string) (*structpb.Value, error) { if v != nil { // TODO(Bobgy): discuss whether we need to remove this. // We always allow accessing the value as string_value, it gets JSON serialized version of the value. - text, err := metadata.PbValueToText(v) + text, err := util.PBValueToText(v) if err != nil { return nil, err } diff --git a/backend/src/v2/metadata/client.go b/backend/src/v2/metadata/client.go deleted file mode 100644 index 57ee2b456fe..00000000000 --- a/backend/src/v2/metadata/client.go +++ /dev/null @@ -1,1324 +0,0 @@ -// Copyright 2021-2023 The Kubeflow Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Package metadata contains types to record/retrieve metadata stored in MLMD -// for individual pipeline steps. 
-package metadata - -import ( - "context" - "crypto/tls" - "encoding/json" - "errors" - "fmt" - "path" - "strconv" - "strings" - "sync" - "time" - - "github.com/kubeflow/pipelines/backend/src/common/util" - "gopkg.in/yaml.v3" - - "github.com/kubeflow/pipelines/backend/src/v2/objectstore" - - "google.golang.org/grpc/credentials" - "google.golang.org/grpc/credentials/insecure" - - "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" - - "github.com/golang/glog" - grpc_retry "github.com/grpc-ecosystem/go-grpc-middleware/v2/interceptors/retry" - pb "github.com/kubeflow/pipelines/third_party/ml-metadata/go/ml_metadata" - "google.golang.org/grpc" - "google.golang.org/grpc/codes" - "google.golang.org/grpc/status" - "google.golang.org/protobuf/encoding/protojson" - "google.golang.org/protobuf/proto" - "google.golang.org/protobuf/types/known/structpb" -) - -const ( - pipelineContextTypeName = "system.Pipeline" - pipelineRunContextTypeName = "system.PipelineRun" - ImporterExecutionTypeName = "system.ImporterExecution" - mlmdClientSideMaxRetries = 3 -) - -type ExecutionType string - -const ( - ContainerExecutionTypeName ExecutionType = "system.ContainerExecution" - DagExecutionTypeName ExecutionType = "system.DAGExecution" -) - -var ( - // Note: All types are schemaless so we can easily evolve the types as needed. - pipelineContextType = &pb.ContextType{ - Name: proto.String(pipelineContextTypeName), - } - - pipelineRunContextType = &pb.ContextType{ - Name: proto.String(pipelineRunContextTypeName), - } - - dagExecutionType = &pb.ExecutionType{ - Name: proto.String(string(DagExecutionTypeName)), - } - - containerExecutionType = &pb.ExecutionType{ - Name: proto.String(string(ContainerExecutionTypeName)), - } - importerExecutionType = &pb.ExecutionType{ - Name: proto.String(ImporterExecutionTypeName), - } -) - -type ClientInterface interface { - GetPipeline(ctx context.Context, pipelineName, runID, namespace, runResource, pipelineRoot, storeSessionInfo string) (*Pipeline, error) - GetDAG(ctx context.Context, executionID int64) (*DAG, error) - PublishExecution(ctx context.Context, execution *Execution, outputParameters map[string]*structpb.Value, outputArtifacts []*OutputArtifact, state pb.Execution_State) error - CreateExecution(ctx context.Context, pipeline *Pipeline, config *ExecutionConfig) (*Execution, error) - PrePublishExecution(ctx context.Context, execution *Execution, config *ExecutionConfig) (*Execution, error) - GetExecutions(ctx context.Context, ids []int64) ([]*pb.Execution, error) - GetExecution(ctx context.Context, id int64) (*Execution, error) - GetPipelineFromExecution(ctx context.Context, id int64) (*Pipeline, error) - GetExecutionsInDAG(ctx context.Context, dag *DAG, pipeline *Pipeline, filter bool) (executionsMap map[string]*Execution, err error) - UpdateDAGExecutionsState(ctx context.Context, dag *DAG, pipeline *Pipeline) (err error) - PutDAGExecutionState(ctx context.Context, executionID int64, state pb.Execution_State) (err error) - GetEventsByArtifactIDs(ctx context.Context, artifactIds []int64) ([]*pb.Event, error) - GetArtifactName(ctx context.Context, artifactId int64) (string, error) - GetArtifacts(ctx context.Context, ids []int64) ([]*pb.Artifact, error) - GetOutputArtifactsByExecutionId(ctx context.Context, executionId int64) (map[string]*OutputArtifact, error) - RecordArtifact(ctx context.Context, outputName, schema string, runtimeArtifact *pipelinespec.RuntimeArtifact, state pb.Artifact_State, bucketConfig *objectstore.Config) (*OutputArtifact, error) - 
GetOrInsertArtifactType(ctx context.Context, schema string) (typeID int64, err error) - FindMatchedArtifact(ctx context.Context, artifactToMatch *pb.Artifact, pipelineContextId int64) (matchedArtifact *pb.Artifact, err error) -} - -// Client is an MLMD service client. -type Client struct { - svc pb.MetadataStoreServiceClient - ctxTypeCache sync.Map -} - -// NewClient creates a Client given the MLMD server address and port. -func NewClient(serverAddress, serverPort string, tlsCfg *tls.Config) (*Client, error) { - opts := []grpc_retry.CallOption{ - grpc_retry.WithMax(mlmdClientSideMaxRetries), - grpc_retry.WithBackoff(grpc_retry.BackoffExponentialWithJitter(300*time.Millisecond, 0.20)), - grpc_retry.WithCodes(codes.Aborted), - } - - creds := insecure.NewCredentials() - if tlsCfg != nil { - creds = credentials.NewTLS(tlsCfg) - } - - conn, err := grpc.Dial(fmt.Sprintf("%s:%s", serverAddress, serverPort), - grpc.WithTransportCredentials(creds), - grpc.WithStreamInterceptor(grpc_retry.StreamClientInterceptor(opts...)), - grpc.WithUnaryInterceptor(grpc_retry.UnaryClientInterceptor(opts...)), - ) - if err != nil { - return nil, fmt.Errorf("metadata.NewClient() failed: %w", err) - } - - return &Client{ - svc: pb.NewMetadataStoreServiceClient(conn), - ctxTypeCache: sync.Map{}, - }, nil -} - -// ExecutionConfig represents the input parameters and artifacts to an Execution. -type ExecutionConfig struct { - TaskName string - DisplayName string // optional, MLMD execution display name. - Name string // optional, MLMD execution name. When provided, this needs to be unique among all MLMD executions. - ExecutionType ExecutionType - NotTriggered bool // optional, not triggered executions will have CANCELED state. - ParentDagID int64 // parent DAG execution ID. Only the root DAG does not have a parent DAG. - InputParameters map[string]*structpb.Value - OutputParameters map[string]*pipelinespec.DagOutputsSpec_DagOutputParameterSpec - OutputArtifacts map[string]*pipelinespec.DagOutputsSpec_DagOutputArtifactSpec - InputArtifactIDs map[string][]int64 - IterationIndex *int // Index of the iteration. - - // ContainerExecution custom properties - Image, CachedMLMDExecutionID, FingerPrint string - PodName, PodUID, Namespace string - - // DAGExecution custom properties - IterationCount *int // Number of iterations for an iterator DAG. - TotalDagTasks *int // Number of tasks inside the DAG -} - -// InputArtifact is a wrapper around an MLMD artifact used as component inputs. -type InputArtifact struct { - Artifact *pb.Artifact -} - -// OutputArtifact represents a schema and an MLMD artifact for output artifacts -// produced by a component. -type OutputArtifact struct { - Name string - Artifact *pb.Artifact - Schema string -} - -func (oa *OutputArtifact) Marshal() ([]byte, error) { - b, err := protojson.Marshal(oa.Artifact) - if err != nil { - return nil, err - } - return b, nil -} - -func (oa *OutputArtifact) ToRuntimeArtifact() (*pipelinespec.RuntimeArtifact, error) { - if oa == nil { - return nil, nil - } - ra, err := toRuntimeArtifact(oa.Artifact) - if err != nil { - return nil, err - } - if ra.Type == nil { - ra.Type = &pipelinespec.ArtifactTypeSchema{} - } - ra.Type.Kind = &pipelinespec.ArtifactTypeSchema_InstanceSchema{ - InstanceSchema: oa.Schema, - } - return ra, nil -} - -// Pipeline is a handle for the current pipeline. 
-type Pipeline struct { - pipelineCtx *pb.Context - pipelineRunCtx *pb.Context -} - -func (p *Pipeline) GetRunCtxID() int64 { - if p == nil { - return 0 - } - return p.pipelineRunCtx.GetId() -} - -func (p *Pipeline) GetCtxID() int64 { - if p == nil { - return 0 - } - return p.pipelineCtx.GetId() -} - -func (p *Pipeline) GetStoreSessionInfo() string { - if p == nil { - return "" - } - props := p.pipelineRunCtx.GetCustomProperties() - storeSessionInfo, ok := props[keyStoreSessionInfo] - if !ok { - return "" - } - return storeSessionInfo.GetStringValue() -} - -func (p *Pipeline) GetPipelineRoot() string { - if p == nil { - return "" - } - props := p.pipelineRunCtx.GetCustomProperties() - root, ok := props[keyPipelineRoot] - if !ok { - return "" - } - return root.GetStringValue() -} - -// Execution is a handle for the current execution. -type Execution struct { - execution *pb.Execution - pipeline *Pipeline -} - -func (e *Execution) GetID() int64 { - if e == nil { - return 0 - } - return e.execution.GetId() -} - -func (e *Execution) String() string { - if e == nil { - return "" - } - return e.execution.String() -} - -func (e *Execution) GetPipeline() *Pipeline { - if e == nil { - return nil - } - return e.pipeline -} - -func (e *Execution) GetExecution() *pb.Execution { - if e == nil { - return nil - } - return e.execution -} - -func (e *Execution) TaskName() string { - if e == nil { - return "" - } - return e.execution.GetCustomProperties()[keyTaskName].GetStringValue() -} - -func (e *Execution) FingerPrint() string { - if e == nil { - return "" - } - return e.execution.GetCustomProperties()[keyCacheFingerPrint].GetStringValue() -} - -// GetTaskNameWithDagID appends the taskName with its parent dag id. This is -// used to help avoid collisions when creating the taskMap for downstream input -// resolution. -func GetTaskNameWithDagID(taskName string, dagID int64) string { - return fmt.Sprintf("%s_%d", taskName, dagID) -} - -// GetParallelForTaskName appends the taskName with an iteration index. This is -// used to help further avoid collisions with parallelFor tasks with the taskMap -// for downstream input resolution. -func GetParallelForTaskName(taskName string, iterationIndex int64) string { - return fmt.Sprintf("%s_idx_%d", taskName, iterationIndex) -} - -// GenerateOutputURI appends the specified paths to the pipeline root. -// It may be configured to preserve the query part of the pipeline root -// by splitting it off and appending it back to the full URI. -func GenerateOutputURI(pipelineRoot string, paths []string, preserveQueryString bool) string { - querySplit := strings.Split(pipelineRoot, "?") - query := "" - if len(querySplit) == 2 { - pipelineRoot = querySplit[0] - if preserveQueryString { - query = "?" + querySplit[1] - } - } else if len(querySplit) > 2 { - // this should never happen, but just in case. - glog.Warningf("Unexpected pipeline root: %v", pipelineRoot) - } - // we cannot path.Join(root, taskName, artifactName), because root - // contains scheme like gs:// and path.Join cleans up scheme to gs:/ - return fmt.Sprintf("%s/%s%s", strings.TrimRight(pipelineRoot, "/"), path.Join(paths...), query) -} - -// GetPipeline returns the current pipeline represented by the specified -// pipeline name and run ID. 
-func (c *Client) GetPipeline(ctx context.Context, pipelineName, runID, namespace, runResource, pipelineRoot, storeSessionInfo string) (*Pipeline, error) { - pipelineContext, err := c.getOrInsertContext(ctx, pipelineName, pipelineContextType, nil) - if err != nil { - return nil, err - } - glog.Infof("Pipeline Context: %+v", pipelineContext) - metadata := map[string]*pb.Value{ - keyNamespace: StringValue(namespace), - keyResourceName: StringValue(runResource), - // pipeline root of this run - keyPipelineRoot: StringValue(GenerateOutputURI(pipelineRoot, []string{pipelineName, runID}, true)), - keyStoreSessionInfo: StringValue(storeSessionInfo), - } - runContext, err := c.getOrInsertContext(ctx, runID, pipelineRunContextType, metadata) - glog.Infof("Pipeline Run Context: %+v", runContext) - if err != nil { - return nil, err - } - - // Detect whether such parent-child relationship exists. - resParents, err := c.svc.GetParentContextsByContext(ctx, &pb.GetParentContextsByContextRequest{ - ContextId: runContext.Id, - }) - if err != nil { - return nil, err - } - parents := resParents.GetContexts() - if len(parents) > 1 { - return nil, fmt.Errorf("Current run context has more than 1 parent context: %v", parents) - } - if len(parents) == 1 { - // Parent-child context alredy exists. - if parents[0].GetId() != pipelineContext.GetId() { - return nil, fmt.Errorf("Parent context ID %d of current run is different from expected: %d", - parents[0].GetId(), - pipelineContext.GetId()) - } - return &Pipeline{ - pipelineCtx: pipelineContext, - pipelineRunCtx: runContext, - }, nil - } - - // Insert ParentContext relationship if doesn't exist. - err = c.putParentContexts(ctx, &pb.PutParentContextsRequest{ - ParentContexts: []*pb.ParentContext{{ - ChildId: runContext.Id, - ParentId: pipelineContext.Id, - }}, - }) - if err != nil { - return nil, err - } - - return &Pipeline{ - pipelineCtx: pipelineContext, - pipelineRunCtx: runContext, - }, nil -} - -// a Kubeflow Pipelines DAG -type DAG struct { - Execution *Execution -} - -// identifier info for error message purposes -func (d *DAG) Info() string { - return fmt.Sprintf("DAG(executionID=%v)", d.Execution.GetID()) -} - -func (c *Client) GetDAG(ctx context.Context, executionID int64) (*DAG, error) { - dagError := func(err error) error { - return fmt.Errorf("failed to get DAG executionID=%v: %w", executionID, err) - } - res, err := c.GetExecution(ctx, executionID) - if err != nil { - return nil, dagError(err) - } - execution := res.GetExecution() - // TODO(Bobgy): verify execution type is system.DAGExecution - return &DAG{Execution: &Execution{execution: execution}}, nil -} - -func (c *Client) putParentContexts(ctx context.Context, req *pb.PutParentContextsRequest) error { - _, err := c.svc.PutParentContexts(ctx, req) - if err != nil { - if status.Convert(err).Code() == codes.AlreadyExists { - // already exists code is expected when multiple requests are sent in parallel - } else { - return fmt.Errorf("Failed PutParentContexts(%v): %w", req.String(), err) - } - } - return nil -} - -func (c *Client) getExecutionTypeID(ctx context.Context, executionType *pb.ExecutionType) (int64, error) { - eType, err := c.svc.PutExecutionType(ctx, &pb.PutExecutionTypeRequest{ - ExecutionType: executionType, - }) - - if err != nil { - return 0, err - } - - return eType.GetTypeId(), nil -} - -func StringValue(s string) *pb.Value { - return &pb.Value{Value: &pb.Value_StringValue{StringValue: s}} -} - -func intValue(i int64) *pb.Value { - return &pb.Value{Value: 
&pb.Value_IntValue{IntValue: i}} -} - -func doubleValue(f float64) *pb.Value { - return &pb.Value{Value: &pb.Value_DoubleValue{DoubleValue: f}} -} - -// Event path is conceptually artifact name for the execution. -// We cannot store the name as a property of artifact "a", because for example: -// 1. In first task "preprocess", there's an output artifact "processed_data". -// 2. In second task "train", there's an input artifact "dataset" passed from "preprocess" -// task's "processed_data" output. -// -// Now the same artifact is called "processed_data" in "preprocess" task, but "dataset" in -// "train" task, because artifact name is related to the context it's used. -// Therefore, we should store artifact name as a property of the artifact's events -// (connects artifact and execution) instead of the artifact's property. -func eventPath(artifactName string) *pb.Event_Path { - return &pb.Event_Path{ - Steps: []*pb.Event_Path_Step{{ - Value: &pb.Event_Path_Step_Key{ - Key: artifactName, - }, - }}, - } -} - -func getArtifactName(eventPath *pb.Event_Path) (string, error) { - if eventPath == nil || len(eventPath.Steps) == 0 { - return "", fmt.Errorf("failed to get artifact name from eventPath") - } - return eventPath.Steps[0].GetKey(), nil -} - -// PublishExecution publishes the specified execution with the given output -// parameters, artifacts and state. -func (c *Client) PublishExecution(ctx context.Context, execution *Execution, outputParameters map[string]*structpb.Value, outputArtifacts []*OutputArtifact, state pb.Execution_State) error { - e := execution.execution - e.LastKnownState = state.Enum() - glog.V(4).Infof("outputParameters: %v", outputParameters) - glog.V(4).Infof("outputArtifacts: %v", outputArtifacts) - - if outputParameters != nil { - // Record output parameters. 
- outputs := &pb.Value_StructValue{ - StructValue: &structpb.Struct{ - Fields: make(map[string]*structpb.Value), - }, - } - for n, p := range outputParameters { - outputs.StructValue.Fields[n] = p - } - e.CustomProperties[keyOutputs] = &pb.Value{Value: outputs} - } - - contexts := []*pb.Context{} - if execution.pipeline != nil { - contexts = append(contexts, execution.pipeline.pipelineCtx, execution.pipeline.pipelineRunCtx) - } - req := &pb.PutExecutionRequest{ - Execution: e, - Contexts: contexts, - } - - for _, oa := range outputArtifacts { - aePair := &pb.PutExecutionRequest_ArtifactAndEvent{} - if oa.Artifact.GetId() == 0 { - glog.Infof("the id of output artifact is not set, will create new artifact when publishing execution") - aePair = &pb.PutExecutionRequest_ArtifactAndEvent{ - Artifact: oa.Artifact, - Event: &pb.Event{ - Type: pb.Event_OUTPUT.Enum(), - Path: eventPath(oa.Name), - }, - } - } else { - aePair = &pb.PutExecutionRequest_ArtifactAndEvent{ - Event: &pb.Event{ - Type: pb.Event_OUTPUT.Enum(), - Path: eventPath(oa.Name), - ArtifactId: oa.Artifact.Id, - }, - } - } - req.ArtifactEventPairs = append(req.ArtifactEventPairs, aePair) - } - - _, err := c.svc.PutExecution(ctx, req) - return err -} - -// metadata keys -const ( - keyDisplayName = "display_name" - keyTaskName = "task_name" - keyImage = "image" - keyPodName = "pod_name" - keyPodUID = "pod_uid" - keyNamespace = "namespace" - keyResourceName = "resource_name" - keyPipelineRoot = "pipeline_root" - keyStoreSessionInfo = "store_session_info" - keyCacheFingerPrint = "cache_fingerprint" - keyCachedExecutionID = "cached_execution_id" - keyInputs = "inputs" - keyOutputs = "outputs" - keyParameterProducerTask = "parameter_producer_task" - keyOutputArtifacts = "output_artifacts" - keyArtifactProducerTask = "artifact_producer_task" - keyParentDagID = "parent_dag_id" // Parent DAG Execution ID. - keyIterationIndex = "iteration_index" - keyIterationCount = "iteration_count" - keyTotalDagTasks = "total_dag_tasks" -) - -// CreateExecution creates a new MLMD execution under the specified Pipeline. -func (c *Client) CreateExecution(ctx context.Context, pipeline *Pipeline, config *ExecutionConfig) (*Execution, error) { - if config == nil { - return nil, fmt.Errorf("metadata.CreateExecution got config == nil") - } - typeID, err := c.getExecutionTypeID(ctx, &pb.ExecutionType{ - Name: proto.String(string(config.ExecutionType)), - }) - if err != nil { - return nil, err - } - - e := &pb.Execution{ - TypeId: &typeID, - CustomProperties: map[string]*pb.Value{ - keyDisplayName: StringValue(config.DisplayName), - keyTaskName: StringValue(config.TaskName), - }, - } - if config.Name != "" { - e.Name = &config.Name - } - e.LastKnownState = pb.Execution_RUNNING.Enum() - if config.NotTriggered { - // Note, in MLMD, CANCELED state means exactly as what we call - // not triggered. 
- // Reference: https://github.com/google/ml-metadata/blob/3434ebaf36db54a7e67dbb0793980a74ec0c5d50/ml_metadata/proto/metadata_store.proto#L251-L254 - e.LastKnownState = pb.Execution_CANCELED.Enum() - } - if config.ParentDagID != 0 { - e.CustomProperties[keyParentDagID] = intValue(config.ParentDagID) - } - if config.IterationIndex != nil { - e.CustomProperties[keyIterationIndex] = intValue(int64(*config.IterationIndex)) - } - if config.IterationCount != nil { - e.CustomProperties[keyIterationCount] = intValue(int64(*config.IterationCount)) - } - if config.ExecutionType == ContainerExecutionTypeName { - e.CustomProperties[keyPodName] = StringValue(config.PodName) - e.CustomProperties[keyPodUID] = StringValue(config.PodUID) - e.CustomProperties[keyNamespace] = StringValue(config.Namespace) - e.CustomProperties[keyImage] = StringValue(config.Image) - if config.CachedMLMDExecutionID != "" { - e.CustomProperties[keyCachedExecutionID] = StringValue(config.CachedMLMDExecutionID) - } - if config.FingerPrint != "" { - e.CustomProperties[keyCacheFingerPrint] = StringValue(config.FingerPrint) - } - } - if config.InputParameters != nil { - e.CustomProperties[keyInputs] = &pb.Value{Value: &pb.Value_StructValue{ - StructValue: &structpb.Struct{ - Fields: config.InputParameters, - }, - }} - } - // We save the output parameter and output artifact relationships in MLMD in - // case they're provided by a sub-task so that we can follow the - // relationships and retrieve outputs downstream in components that depend - // on said outputs as inputs. - if config.OutputParameters != nil { - // Convert OutputParameters to a format that can be saved in MLMD. - glog.V(4).Info("outputParameters: ", config.OutputParameters) - outputParametersCustomPropertyProtoMap := make(map[string]*structpb.Value) - - for name, value := range config.OutputParameters { - if outputParameterProtoMsg, ok := interface{}(value).(proto.Message); ok { - glog.V(4).Infof("name: %v, value: %w", name, value) - glog.V(4).Info("protoMessage: ", outputParameterProtoMsg) - b, err := protojson.Marshal(outputParameterProtoMsg) - if err != nil { - return nil, err - } - outputValue, _ := structpb.NewValue(string(b)) - outputParametersCustomPropertyProtoMap[name] = outputValue - } - } - e.CustomProperties[keyParameterProducerTask] = &pb.Value{Value: &pb.Value_StructValue{ - StructValue: &structpb.Struct{ - Fields: outputParametersCustomPropertyProtoMap, - }, - }} - } - if config.OutputArtifacts != nil { - b, err := json.Marshal(config.OutputArtifacts) - if err != nil { - return nil, err - } - e.CustomProperties[keyArtifactProducerTask] = StringValue(string(b)) - } - if config.TotalDagTasks != nil { - e.CustomProperties[keyTotalDagTasks] = intValue(int64(*config.TotalDagTasks)) - } - - req := &pb.PutExecutionRequest{ - Execution: e, - Contexts: []*pb.Context{pipeline.pipelineCtx, pipeline.pipelineRunCtx}, - } - - for name, ids := range config.InputArtifactIDs { - for _, id := range ids { - thisId := id // thisId will be referenced below, so we need a local immutable var - aePair := &pb.PutExecutionRequest_ArtifactAndEvent{ - Event: &pb.Event{ - ArtifactId: &thisId, - Path: eventPath(name), - Type: pb.Event_INPUT.Enum(), - }, - } - req.ArtifactEventPairs = append(req.ArtifactEventPairs, aePair) - } - } - - res, err := c.svc.PutExecution(ctx, req) - if err != nil { - return nil, err - } - - getReq := &pb.GetExecutionsByIDRequest{ - ExecutionIds: []int64{res.GetExecutionId()}, - } - - getRes, err := c.svc.GetExecutionsByID(ctx, getReq) - if err != nil { - 
return nil, err - } - - if len(getRes.Executions) != 1 { - return nil, fmt.Errorf("Expected to get one Execution, got %d instead. Request: %v", len(getRes.Executions), getReq) - } - - return &Execution{ - pipeline: pipeline, - execution: getRes.Executions[0], - }, nil -} - -// PrePublishExecution updates an existing MLMD execution with Pod info. -func (c *Client) PrePublishExecution(ctx context.Context, execution *Execution, config *ExecutionConfig) (*Execution, error) { - e := execution.execution - if e.CustomProperties == nil { - e.CustomProperties = make(map[string]*pb.Value) - } - e.CustomProperties[keyPodName] = StringValue(config.PodName) - e.CustomProperties[keyPodUID] = StringValue(config.PodUID) - e.CustomProperties[keyNamespace] = StringValue(config.Namespace) - e.LastKnownState = pb.Execution_RUNNING.Enum() - - _, err := c.svc.PutExecution(ctx, &pb.PutExecutionRequest{ - Execution: e, - }) - if err != nil { - return nil, err - } - return execution, nil -} - -// UpdateDAGExecutionState checks all the statuses of the tasks in the given DAG, based on that it will update the DAG to the corresponding status if necessary. -func (c *Client) UpdateDAGExecutionsState(ctx context.Context, dag *DAG, pipeline *Pipeline) error { - tasks, err := c.GetExecutionsInDAG(ctx, dag, pipeline, true) - if err != nil { - return err - } - - totalDagTasks := dag.Execution.execution.CustomProperties["total_dag_tasks"].GetIntValue() - - glog.V(4).Infof("tasks: %v", tasks) - glog.V(4).Infof("Checking Tasks' State") - completedTasks := 0 - failedTasks := 0 - for _, task := range tasks { - taskState := task.GetExecution().LastKnownState.String() - glog.V(4).Infof("task: %s", task.TaskName()) - glog.V(4).Infof("task state: %s", taskState) - switch taskState { - case "FAILED": - failedTasks++ - case "COMPLETE": - completedTasks++ - case "CACHED": - completedTasks++ - case "CANCELED": - completedTasks++ - } - } - glog.V(4).Infof("completedTasks: %d", completedTasks) - glog.V(4).Infof("failedTasks: %d", failedTasks) - glog.V(4).Infof("totalTasks: %d", totalDagTasks) - - glog.Infof("Attempting to update DAG state") - if completedTasks == int(totalDagTasks) { - c.PutDAGExecutionState(ctx, dag.Execution.GetID(), pb.Execution_COMPLETE) - } else if failedTasks > 0 { - c.PutDAGExecutionState(ctx, dag.Execution.GetID(), pb.Execution_FAILED) - } else { - glog.V(4).Infof("DAG is still running") - } - return nil -} - -// PutDAGExecutionState updates the given DAG Id to the state provided. -func (c *Client) PutDAGExecutionState(ctx context.Context, executionID int64, state pb.Execution_State) error { - - e, err := c.GetExecution(ctx, executionID) - if err != nil { - return err - } - e.execution.LastKnownState = state.Enum() - _, err = c.svc.PutExecution(ctx, &pb.PutExecutionRequest{ - Execution: e.execution, - }) - return err -} - -// GetExecutions ... 
-func (c *Client) GetExecutions(ctx context.Context, ids []int64) ([]*pb.Execution, error) { - req := &pb.GetExecutionsByIDRequest{ExecutionIds: ids} - res, err := c.svc.GetExecutionsByID(ctx, req) - if err != nil { - return nil, err - } - return res.Executions, nil -} - -func (c *Client) GetExecution(ctx context.Context, id int64) (*Execution, error) { - executions, err := c.GetExecutions(ctx, []int64{id}) - if err != nil { - return nil, fmt.Errorf("get execution ID=%v: %w", id, err) - } - if len(executions) == 0 { - return nil, fmt.Errorf("execution ID=%v not found", id) - } - if len(executions) > 1 { - return nil, fmt.Errorf("got %v executions with ID=%v", len(executions), id) - } - execution := executions[0] - pipeline, err := c.GetPipelineFromExecution(ctx, execution.GetId()) - if err != nil { - return nil, err - } - return &Execution{execution: execution, pipeline: pipeline}, nil -} - -func (c *Client) GetPipelineFromExecution(ctx context.Context, id int64) (*Pipeline, error) { - pipelineCtxTypeID, err := c.getContextTypeID(ctx, pipelineContextType) - if err != nil { - return nil, err - } - runCtxTypeID, err := c.getContextTypeID(ctx, pipelineRunContextType) - if err != nil { - return nil, err - } - res, err := c.svc.GetContextsByExecution(ctx, &pb.GetContextsByExecutionRequest{ - ExecutionId: &id, - }) - if err != nil { - return nil, fmt.Errorf("get contexts of execution ID=%v: %w", id, err) - } - pipeline := &Pipeline{} - for _, context := range res.GetContexts() { - if context.GetTypeId() == pipelineCtxTypeID { - if pipeline.pipelineCtx != nil { - return nil, fmt.Errorf("multiple pipeline contexts found") - } - pipeline.pipelineCtx = context - } - if context.GetTypeId() == runCtxTypeID { - if pipeline.pipelineRunCtx != nil { - return nil, fmt.Errorf("multiple run contexts found") - } - pipeline.pipelineRunCtx = context - } - } - return pipeline, nil -} - -// GetExecutionsInDAG gets all executions in the DAG, and organize them -// into a map, keyed by task name. -func (c *Client) GetExecutionsInDAG(ctx context.Context, dag *DAG, pipeline *Pipeline, filter bool) (executionsMap map[string]*Execution, err error) { - defer func() { - if err != nil { - err = fmt.Errorf("failed to get executions in %s: %w", dag.Info(), err) - } - }() - executionsMap = make(map[string]*Execution) - // Documentation on query syntax: - // https://github.com/google/ml-metadata/blob/839c3501a195d340d2855b6ffdb2c4b0b49862c9/ml_metadata/proto/metadata_store.proto#L831 - // If filter is set to true, the MLMD call will only grab executions for the current DAG, else it would grab all the execution for the context which includes sub-DAGs. - parentDAGID := dag.Execution.GetID() - parentDAGFilter := "" - if filter { - parentDAGFilter = fmt.Sprintf("custom_properties.parent_dag_id.int_value = %v", parentDAGID) - } - - // Note, because MLMD does not have index on custom properties right now, we - // take a pipeline run context to limit the number of executions the DB needs to - // iterate through to find sub-executions. 
- - nextPageToken := "" - for { - res, err := c.svc.GetExecutionsByContext(ctx, &pb.GetExecutionsByContextRequest{ - ContextId: pipeline.pipelineRunCtx.Id, - Options: &pb.ListOperationOptions{ - FilterQuery: &parentDAGFilter, - NextPageToken: &nextPageToken, - }, - }) - if err != nil { - return nil, err - } - - execs := res.GetExecutions() - glog.V(4).Infof("execs: %v", execs) - for _, e := range execs { - execution := &Execution{execution: e} - glog.V(4).Infof("taskName before DAG injection: %s", execution.TaskName()) - // Sometimes components in nested DAGs have identical task names. We - // update all task names to include the DAG ID to avoid potential - // key collisions in the executions map. - taskName := GetTaskNameWithDagID(execution.TaskName(), parentDAGID) - glog.V(4).Infof("taskName after DAG Injection: %s", taskName) - glog.V(4).Infof("execution: %s", execution) - if taskName == "" { - if e.GetCustomProperties()[keyParentDagID] != nil { - return nil, fmt.Errorf("empty task name for execution ID: %v", execution.GetID()) - } - // When retrieving executions without the parentDAGFilter, the - // rootDAG execution is supplied but does not have an associated - // TaskName nor is the parentDagID set, therefore we won't - // include it in the executionsMap. - continue - } - // Handle for parallelFor subdags & their tasks that consume the - // values from the iterator. Within a ParallelFor DAG, the iteration - // DAGs share the same name. In order to avoid collisions in the - // taskMap, the iteration index will be appended to the taskName. - // This also fortifies against potential collisions of tasks across - // iterations. - if e.GetCustomProperties()[keyIterationIndex] != nil { - taskName = GetParallelForTaskName(taskName, e.GetCustomProperties()[keyIterationIndex].GetIntValue()) - - } else if dag.Execution.GetExecution().GetCustomProperties()[keyIterationIndex] != nil { - // Handle for tasks within a parallelFor subdag that do not - // consume the values from the iterator as input but rather the - // output of a task that does. - taskName = GetParallelForTaskName(taskName, dag.Execution.GetExecution().GetCustomProperties()[keyIterationIndex].GetIntValue()) - } - - existing, ok := executionsMap[taskName] - if ok { - // TODO: The failure to handle this results in a specific edge - // case which has yet to be solved for. If you have three nested - // pipelines: A, which calls B, which calls C, and B and C share - // a task that A does not have but depends on in a producer - // subtask, when GetExecutionsInDAG is called, it will raise - // this error. - - // TODO(Bobgy): to support retry, we need to handle multiple tasks with the same task name. - return nil, fmt.Errorf("two tasks have the same task name %q, id1=%v id2=%v", taskName, existing.GetID(), execution.GetID()) - } - executionsMap[taskName] = execution - } - - nextPageToken = res.GetNextPageToken() - - if nextPageToken == "" { - break - } - } - - return executionsMap, nil -} - -// GetEventsByArtifactIDs ... 
-func (c *Client) GetEventsByArtifactIDs(ctx context.Context, artifactIds []int64) ([]*pb.Event, error) { - req := &pb.GetEventsByArtifactIDsRequest{ArtifactIds: artifactIds} - res, err := c.svc.GetEventsByArtifactIDs(ctx, req) - if err != nil { - return nil, err - } - return res.Events, nil -} - -func (c *Client) GetArtifactName(ctx context.Context, artifactId int64) (string, error) { - mlmdEvents, err := c.GetEventsByArtifactIDs(ctx, []int64{artifactId}) - if err != nil { - return "", fmt.Errorf("faild when getting events with artifact id %v: %w", artifactId, err) - } - if len(mlmdEvents) == 0 { - glog.Infof("can't find any events with artifact id %v", artifactId) - return "", nil - } - event := mlmdEvents[0] - return getArtifactName(event.Path) -} - -// GetArtifacts ... -func (c *Client) GetArtifacts(ctx context.Context, ids []int64) ([]*pb.Artifact, error) { - req := &pb.GetArtifactsByIDRequest{ArtifactIds: ids} - res, err := c.svc.GetArtifactsByID(ctx, req) - if err != nil { - return nil, err - } - return res.Artifacts, nil -} - -// GetOutputArtifactsByExecutionId ... -// TODO: Support multiple artifacts someday, probably through the v2 engine. -func (c *Client) GetOutputArtifactsByExecutionId(ctx context.Context, executionId int64) (map[string]*OutputArtifact, error) { - getEventsByExecutionIDsReq := &pb.GetEventsByExecutionIDsRequest{ExecutionIds: []int64{executionId}} - getEventsByExecutionIDsRes, err := c.svc.GetEventsByExecutionIDs(ctx, getEventsByExecutionIDsReq) - if err != nil { - return nil, fmt.Errorf("failed to get events with execution id %v: %w", executionId, err) - } - var outputArtifactsIDs []int64 - outputArtifactNamesById := make(map[int64]string) - for _, event := range getEventsByExecutionIDsRes.Events { - if *event.Type == pb.Event_OUTPUT { - outputArtifactsIDs = append(outputArtifactsIDs, event.GetArtifactId()) - artifactName, err := getArtifactName(event.Path) - if err != nil { - return nil, err - } - outputArtifactNamesById[event.GetArtifactId()] = artifactName - } - } - outputArtifacts, err := c.GetArtifacts(ctx, outputArtifactsIDs) - if err != nil { - return nil, fmt.Errorf("failed to get output artifacts: %w", err) - } - outputArtifactsByName := make(map[string]*OutputArtifact) - for _, outputArtifact := range outputArtifacts { - name, ok := outputArtifactNamesById[outputArtifact.GetId()] - if !ok { - return nil, fmt.Errorf("failed to get name of artifact with id %v", outputArtifact.GetId()) - } - outputArtifactsByName[name] = &OutputArtifact{ - Name: name, - Artifact: outputArtifact, - Schema: "", // TODO(Bobgy): figure out how to get schema - } - } - return outputArtifactsByName, nil -} - -func (c *Client) GetInputArtifactsByExecutionID(ctx context.Context, executionID int64) (inputs map[string]*pipelinespec.ArtifactList, err error) { - defer func() { - if err != nil { - err = fmt.Errorf("GetInputArtifactsByExecution(id=%v) failed: %w", executionID, err) - } - }() - eventsReq := &pb.GetEventsByExecutionIDsRequest{ExecutionIds: []int64{executionID}} - eventsRes, err := c.svc.GetEventsByExecutionIDs(ctx, eventsReq) - if err != nil { - return nil, err - } - var artifactIDs []int64 - nameByID := make(map[int64]string) - for _, event := range eventsRes.Events { - if *event.Type == pb.Event_INPUT { - artifactIDs = append(artifactIDs, event.GetArtifactId()) - name, err := getArtifactName(event.Path) - if err != nil { - return nil, err - } - nameByID[event.GetArtifactId()] = name - } - } - artifacts, err := c.GetArtifacts(ctx, artifactIDs) - if err != nil { - 
return nil, err - } - inputs = make(map[string]*pipelinespec.ArtifactList) - for _, artifact := range artifacts { - name, ok := nameByID[artifact.GetId()] - if !ok { - return nil, fmt.Errorf("failed to get name of artifact with id %v", artifact.GetId()) - } - runtimeArtifact, err := toRuntimeArtifact(artifact) - if err != nil { - return nil, err - } - inputs[name] = &pipelinespec.ArtifactList{ - Artifacts: []*pipelinespec.RuntimeArtifact{runtimeArtifact}, - } - } - return inputs, nil -} - -// Only supports schema titles for now. -type schemaObject struct { - Title string `yaml:"title"` -} - -func SchemaToArtifactType(schema string) (*pb.ArtifactType, error) { - so := &schemaObject{} - if err := yaml.Unmarshal([]byte(schema), so); err != nil { - return nil, err - } - - // TODO: Also parse properties. - if so.Title == "" { - glog.Fatal("No title specified") - } - at := &pb.ArtifactType{Name: proto.String(so.Title)} - return at, nil -} - -// RecordArtifact ... -func (c *Client) RecordArtifact(ctx context.Context, outputName, schema string, runtimeArtifact *pipelinespec.RuntimeArtifact, state pb.Artifact_State, bucketConfig *objectstore.Config) (*OutputArtifact, error) { - artifact, err := toMLMDArtifact(runtimeArtifact) - if err != nil { - return nil, err - } - at, err := SchemaToArtifactType(schema) - if err != nil { - return nil, err - } - putTypeRes, err := c.svc.PutArtifactType(ctx, &pb.PutArtifactTypeRequest{ArtifactType: at}) - if err != nil { - return nil, err - } - at.Id = putTypeRes.TypeId - - artifact.TypeId = at.Id - artifact.State = &state - if artifact.CustomProperties == nil { - artifact.CustomProperties = make(map[string]*pb.Value) - } - if _, ok := artifact.CustomProperties["display_name"]; !ok { - // display name default value - artifact.CustomProperties["display_name"] = StringValue(outputName) - } - - // An artifact can belong to an external store specified via kfp-launcher - // or via executor environment (e.g. IRSA) - // This allows us to easily identify where to locate the artifact both - // in user executor environment as well as in kfp ui - if _, ok := artifact.CustomProperties["store_session_info"]; !ok { - storeSessionInfoJSON, err1 := json.Marshal(bucketConfig.SessionInfo) - if err1 != nil { - return nil, err1 - } - storeSessionInfoStr := string(storeSessionInfoJSON) - artifact.CustomProperties["store_session_info"] = StringValue(storeSessionInfoStr) - } - - res, err := c.svc.PutArtifacts(ctx, &pb.PutArtifactsRequest{ - Artifacts: []*pb.Artifact{artifact}, - }) - if err != nil { - return nil, err - } - if len(res.ArtifactIds) != 1 { - return nil, errors.New("Failed to insert exactly one artifact") - } - - getRes, err := c.svc.GetArtifactsByID(ctx, &pb.GetArtifactsByIDRequest{ArtifactIds: res.ArtifactIds}) - if err != nil { - return nil, err - } - if len(getRes.Artifacts) != 1 { - return nil, errors.New("Failed to retrieve exactly one artifact") - } - return &OutputArtifact{ - Artifact: getRes.Artifacts[0], - Name: outputName, // runtimeArtifact.Name is in fact artifact ID, we need to pass name separately - Schema: runtimeArtifact.GetType().GetInstanceSchema(), - }, nil -} - -// TODO consider batching these requests -// TODO(lingqinggan): need to create artifact types during initiation, and only allow these types. -// Currently we allow users to create any artifact type. 
-func (c *Client) GetOrInsertArtifactType(ctx context.Context, schema string) (typeID int64, err error) { - defer func() { - if err != nil { - err = fmt.Errorf("getOrInsertArtifactType(schema=%q) failed: %w", schema, err) - } - }() - at, err := SchemaToArtifactType(schema) - if err != nil { - return 0, fmt.Errorf("converting schema to artifact type failed: %w", err) - } - getTypesRes, err := c.svc.GetArtifactType(ctx, &pb.GetArtifactTypeRequest{TypeName: at.Name}) - if err == nil { - if getTypesRes.GetArtifactType() != nil { - return getTypesRes.GetArtifactType().GetId(), nil - } - } - // If artifact type is empty, create one - putTypeRes, err := c.svc.PutArtifactType(ctx, &pb.PutArtifactTypeRequest{ArtifactType: at}) - if err != nil { - return 0, fmt.Errorf("PutArtifactType failed: %w", err) - } - return putTypeRes.GetTypeId(), err -} - -func (c *Client) FindMatchedArtifact(ctx context.Context, artifactToMatch *pb.Artifact, pipelineContextId int64) (matchedArtifact *pb.Artifact, err error) { - defer func() { - if err != nil { - err = fmt.Errorf("FindMatchedArtifact(artifact=%q) failed: %w", artifactToMatch, err) - } - }() - uris := []string{artifactToMatch.GetUri()} - getArtifactsByUriRes, err := c.svc.GetArtifactsByURI(ctx, &pb.GetArtifactsByURIRequest{Uris: uris}) - if err != nil { - return nil, err - } - for _, candidateArtifact := range getArtifactsByUriRes.GetArtifacts() { - matched, err := c.matchedArtifactOrNot(ctx, artifactToMatch, candidateArtifact, pipelineContextId) - if err != nil { - return nil, err - } - if matched { - return candidateArtifact, nil - } - } - return nil, nil - -} - -func (c *Client) matchedArtifactOrNot(ctx context.Context, target *pb.Artifact, candidate *pb.Artifact, pipelineContextId int64) (bool, error) { - if target.GetTypeId() != candidate.GetTypeId() || target.GetState() != candidate.GetState() || target.GetUri() != candidate.GetUri() { - return false, nil - } - for target_k, target_v := range target.GetCustomProperties() { - val, ok := candidate.GetCustomProperties()[target_k] - if !ok || !proto.Equal(target_v, val) { - return false, nil - } - } - res, err := c.svc.GetContextsByArtifact(ctx, &pb.GetContextsByArtifactRequest{ArtifactId: candidate.Id}) - if err != nil { - return false, fmt.Errorf("failed to get contextsByArtifact with artifactID=%q: %w", candidate.GetId(), err) - } - for _, c := range res.GetContexts() { - if c.GetId() == pipelineContextId { - return true, nil - } - } - return false, nil -} - -func (c *Client) getContextTypeID(ctx context.Context, contextType *pb.ContextType) (typeID int64, err error) { - defer func() { - if err != nil { - err = fmt.Errorf("getContextTypeID(name=%q) failed: %w", contextType.GetName(), err) - } - }() - cached, ok := c.ctxTypeCache.Load(contextType.GetName()) - if ok { - typeID, ok = cached.(int64) - if !ok { - return 0, fmt.Errorf("bug: incorrect value type cached") - } - return typeID, nil - } - res, err := c.svc.GetContextType(ctx, &pb.GetContextTypeRequest{TypeName: contextType.Name}) - if err == nil { // no error - c.ctxTypeCache.Store(contextType.GetName(), res.GetContextType().GetId()) - return res.GetContextType().GetId(), nil - } - if status.Convert(err).Code() != codes.NotFound { - return 0, err - } - // only not found error is expected - putRes, err := c.svc.PutContextType(ctx, &pb.PutContextTypeRequest{ContextType: contextType}) - if err == nil { // no error - c.ctxTypeCache.Store(contextType.GetName(), putRes.GetTypeId()) - return putRes.GetTypeId(), nil - } - if status.Convert(err).Code() != 
codes.AlreadyExists { - return 0, err - } - // It's expected other tasks may try to create the context type at the same time. - // Handle codes.AlreadyExists: - res, err = c.svc.GetContextType(ctx, &pb.GetContextTypeRequest{TypeName: contextType.Name}) - if err != nil { - return 0, err - } - c.ctxTypeCache.Store(contextType.GetName(), res.GetContextType().GetId()) - return res.GetContextType().GetId(), nil -} - -func (c *Client) getOrInsertContext(ctx context.Context, name string, contextType *pb.ContextType, customProps map[string]*pb.Value) (*pb.Context, error) { - // The most common case -- the context is already created by upstream tasks. - // So we try to get the context first. - getCtxRes, err := c.svc.GetContextByTypeAndName(ctx, &pb.GetContextByTypeAndNameRequest{TypeName: contextType.Name, ContextName: proto.String(name)}) - - if err != nil { - return nil, util.Wrap(err, fmt.Sprintf("Failed GetContextByTypeAndName(type=%q, name=%q)", contextType.GetName(), name)) - } - // Bug in MLMD GetContextsByTypeAndName? It doesn't return error even when no - // context was found. - if getCtxRes.Context != nil { - return getCtxRes.Context, nil - } - - // Get the ContextType ID. - typeID, err := c.getContextTypeID(ctx, contextType) - if err != nil { - return nil, err - } - // Next, create the Context. - putReq := &pb.PutContextsRequest{ - Contexts: []*pb.Context{ - { - Name: proto.String(name), - TypeId: proto.Int64(typeID), - CustomProperties: customProps, - }, - }, - } - _, err = c.svc.PutContexts(ctx, putReq) - // It's expected other tasks may try to create the context at the same time, - // so ignore AlreadyExists error. - if err != nil && status.Convert(err).Code() != codes.AlreadyExists { - return nil, fmt.Errorf("Failed PutContext(name=%q, type=%q, typeid=%v): %w", name, contextType.GetName(), typeID, err) - } - - // Get the created context. 
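The get-create-get sequence deleted here, together with the read-back that follows below, is the client's idempotent-upsert pattern: read first, create on a miss, treat a concurrent creator's AlreadyExists as success, then read back whichever record won. A minimal generic sketch of that shape, assuming only gRPC's status and codes packages; the `upsert` helper and its callback signatures are illustrative names, not KFP APIs:

```go
package sketch

import (
	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// upsert sketches the get-create-get flow: return the record if it already
// exists, otherwise create it while tolerating a concurrent creator's
// AlreadyExists, and finally read back whichever record won the race.
func upsert[T any](get func() (T, bool, error), put func() error) (T, error) {
	var zero T
	v, ok, err := get()
	if err != nil {
		return zero, err
	}
	if ok {
		return v, nil // common case: an upstream task already created it
	}
	if err := put(); err != nil && status.Convert(err).Code() != codes.AlreadyExists {
		return zero, err
	}
	v, _, err = get() // ours, or the concurrent winner's record
	return v, err
}
```

Reading back after the put, rather than trusting the put response, is what keeps the flow correct when several tasks race to create the same context or context type.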
- getCtxRes, err = c.svc.GetContextByTypeAndName(ctx, &pb.GetContextByTypeAndNameRequest{TypeName: contextType.Name, ContextName: proto.String(name)}) - if err != nil { - return nil, fmt.Errorf("Failed GetContext(name=%q, type=%q): %w", name, contextType.GetName(), err) - } - return getCtxRes.GetContext(), nil -} - -func GenerateExecutionConfig(executorInput *pipelinespec.ExecutorInput) (*ExecutionConfig, error) { - ecfg := &ExecutionConfig{ - InputArtifactIDs: make(map[string][]int64), - } - - for name, artifactList := range executorInput.Inputs.Artifacts { - for _, artifact := range artifactList.Artifacts { - id, err := strconv.ParseInt(artifact.Name, 10, 64) - if err != nil { - return nil, fmt.Errorf("unable to parse input artifact id from %q: %w", id, err) - } - ecfg.InputArtifactIDs[name] = append(ecfg.InputArtifactIDs[name], id) - } - } - - ecfg.InputParameters = executorInput.Inputs.ParameterValues - return ecfg, nil -} - -func (c *Client) getContextByID(ctx context.Context, id int64) (*pb.Context, error) { - res, err := c.svc.GetContextsByID(ctx, &pb.GetContextsByIDRequest{ContextIds: []int64{id}}) - if err != nil { - return nil, fmt.Errorf("getContext(id=%v): %w", id, err) - } - contexts := res.GetContexts() - if len(contexts) > 1 { - return nil, fmt.Errorf("getContext(id=%v): got %v contexts, expect 1", id, len(contexts)) - } - if len(contexts) == 0 { - return nil, fmt.Errorf("getContext(id=%v): not found", id) - } - if contexts[0] == nil { - return nil, fmt.Errorf("getContext(id=%v): got nil context", id) - } - return contexts[0], nil -} diff --git a/backend/src/v2/metadata/client_fake.go b/backend/src/v2/metadata/client_fake.go deleted file mode 100644 index beaddcc098c..00000000000 --- a/backend/src/v2/metadata/client_fake.go +++ /dev/null @@ -1,102 +0,0 @@ -// Copyright 2023 The Kubeflow Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Package metadata contains types to record/retrieve metadata stored in MLMD -// for individual pipeline steps. 
- -package metadata - -import ( - "context" - - "github.com/kubeflow/pipelines/backend/src/v2/objectstore" - - "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" - pb "github.com/kubeflow/pipelines/third_party/ml-metadata/go/ml_metadata" - "google.golang.org/protobuf/types/known/structpb" -) - -type FakeClient struct { -} - -func NewFakeClient() *FakeClient { - return &FakeClient{} -} - -func (c *FakeClient) GetPipeline(ctx context.Context, pipelineName, runID, namespace, runResource, pipelineRoot string, storeSessionInfo string) (*Pipeline, error) { - return nil, nil -} - -func (c *FakeClient) GetDAG(ctx context.Context, executionID int64) (*DAG, error) { - return nil, nil -} - -func (c *FakeClient) PublishExecution(ctx context.Context, execution *Execution, outputParameters map[string]*structpb.Value, outputArtifacts []*OutputArtifact, state pb.Execution_State) error { - return nil -} - -func (c *FakeClient) CreateExecution(ctx context.Context, pipeline *Pipeline, config *ExecutionConfig) (*Execution, error) { - return nil, nil -} -func (c *FakeClient) PrePublishExecution(ctx context.Context, execution *Execution, config *ExecutionConfig) (*Execution, error) { - return nil, nil -} - -func (c *FakeClient) GetExecutions(ctx context.Context, ids []int64) ([]*pb.Execution, error) { - return nil, nil -} - -func (c *FakeClient) GetExecution(ctx context.Context, id int64) (*Execution, error) { - return nil, nil -} - -func (c *FakeClient) GetPipelineFromExecution(ctx context.Context, id int64) (*Pipeline, error) { - return nil, nil -} - -func (c *FakeClient) GetExecutionsInDAG(ctx context.Context, dag *DAG, pipeline *Pipeline, filter bool) (executionsMap map[string]*Execution, err error) { - return nil, nil -} -func (c *FakeClient) UpdateDAGExecutionsState(ctx context.Context, dag *DAG, pipeline *Pipeline) (err error) { - return nil -} -func (c *FakeClient) PutDAGExecutionState(ctx context.Context, executionID int64, state pb.Execution_State) (err error) { - return nil -} -func (c *FakeClient) GetEventsByArtifactIDs(ctx context.Context, artifactIds []int64) ([]*pb.Event, error) { - return nil, nil -} - -func (c *FakeClient) GetArtifactName(ctx context.Context, artifactId int64) (string, error) { - return "", nil -} -func (c *FakeClient) GetArtifacts(ctx context.Context, ids []int64) ([]*pb.Artifact, error) { - return nil, nil -} - -func (c *FakeClient) GetOutputArtifactsByExecutionId(ctx context.Context, executionId int64) (map[string]*OutputArtifact, error) { - return nil, nil -} - -func (c *FakeClient) RecordArtifact(ctx context.Context, outputName, schema string, runtimeArtifact *pipelinespec.RuntimeArtifact, state pb.Artifact_State, bucketConfig *objectstore.Config) (*OutputArtifact, error) { - return nil, nil -} - -func (c *FakeClient) GetOrInsertArtifactType(ctx context.Context, schema string) (typeID int64, err error) { - return 0, nil -} - -func (c *FakeClient) FindMatchedArtifact(ctx context.Context, artifactToMatch *pb.Artifact, pipelineContextId int64) (matchedArtifact *pb.Artifact, err error) { - return nil, nil -} diff --git a/backend/src/v2/metadata/client_test.go b/backend/src/v2/metadata/client_test.go deleted file mode 100644 index bfcc7c1e48e..00000000000 --- a/backend/src/v2/metadata/client_test.go +++ /dev/null @@ -1,354 +0,0 @@ -// Copyright 2021 The Kubeflow Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package metadata_test - -import ( - "context" - "crypto/tls" - "fmt" - "runtime/debug" - "sync" - "testing" - - "github.com/kubeflow/pipelines/backend/src/v2/metadata/testutils" - - "github.com/google/go-cmp/cmp" - "github.com/google/go-cmp/cmp/cmpopts" - "github.com/google/uuid" - "github.com/kubeflow/pipelines/backend/src/v2/metadata" - pb "github.com/kubeflow/pipelines/third_party/ml-metadata/go/ml_metadata" - "google.golang.org/protobuf/proto" - "google.golang.org/protobuf/testing/protocmp" -) - -// This test depends on a MLMD grpc server running at localhost:8080. -const ( - testMlmdServerAddress = "localhost" - testMlmdServerPort = "8080" - namespace = "kubeflow" - runResource = "workflows.argoproj.io/hello-world-abcd" - pipelineRoot = "gs://my-bucket/path/to/root" -) - -func Test_schemaToArtifactType(t *testing.T) { - tests := []struct { - name string - schema string - want *pb.ArtifactType - wantErr bool - }{ - { - name: "Parses Schema Title Correctly", - schema: "properties:\ntitle: kfp.Dataset\ntype: object\n", - want: &pb.ArtifactType{ - Name: proto.String("kfp.Dataset"), - }, - wantErr: false, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - got, err := metadata.SchemaToArtifactType(tt.schema) - if (err != nil) != tt.wantErr { - t.Errorf("schemaToArtifactType() error = %v, wantErr %v", err, tt.wantErr) - return - } - if diff := cmp.Diff(got, tt.want, cmpopts.EquateEmpty(), protocmp.Transform()); diff != "" { - t.Errorf("schemaToArtifactType() = %+v, want %+v\nDiff (-want, +got)\n%s", got, tt.want, diff) - } - }) - } -} - -func Test_GetPipeline(t *testing.T) { - t.Skip("Temporarily disable the test that requires cluster connection.") - - fatalIf := func(err error) { - if err != nil { - debug.PrintStack() - t.Fatal(err) - } - } - - ctx := context.Background() - runUuid, err := uuid.NewRandom() - fatalIf(err) - runId := runUuid.String() - client, err := metadata.NewClient(testMlmdServerAddress, testMlmdServerPort, &tls.Config{}) - fatalIf(err) - mlmdClient, err := testutils.NewTestMlmdClient(testMlmdServerAddress, testMlmdServerPort, false, "") - fatalIf(err) - - pipeline, err := client.GetPipeline(ctx, "get-pipeline-test", runId, namespace, runResource, pipelineRoot, "") - fatalIf(err) - expectPipelineRoot := fmt.Sprintf("%s/get-pipeline-test/%s", pipelineRoot, runId) - if pipeline.GetPipelineRoot() != expectPipelineRoot { - t.Errorf("client.GetPipeline(pipelineRoot=%q)=%q, expect %q", pipelineRoot, pipeline.GetPipelineRoot(), expectPipelineRoot) - } - runCtxType := "system.PipelineRun" - pipelineName := "get-pipeline-test" - - res, err := mlmdClient.GetContextByTypeAndName(ctx, &pb.GetContextByTypeAndNameRequest{ - TypeName: &runCtxType, - ContextName: &runId, - }) - fatalIf(err) - if res.GetContext() == nil { - t.Fatalf("GetContextByTypeAndName(name=%q, type=%q)=nil", runId, runCtxType) - } - resParents, err := mlmdClient.GetParentContextsByContext(ctx, &pb.GetParentContextsByContextRequest{ - ContextId: res.GetContext().Id, - }) - fatalIf(err) - parents := resParents.GetContexts() - if len(parents) != 1 { - t.Errorf("Got %v 
parent contexts, want 1", len(parents)) - } - pipelineCtx := parents[0] - if pipelineCtx.GetName() != pipelineName { - t.Errorf("GetParentContextsByContext(name=%q, type=%q)=Context(name=%q), want Context(name=%q)", - runId, runCtxType, pipelineCtx.GetName(), pipelineName) - } -} - -func Test_GetPipeline_Twice(t *testing.T) { - t.Skip("Temporarily disable the test that requires cluster connection.") - - fatalIf := func(err error) { - if err != nil { - debug.PrintStack() - t.Fatal(err) - } - } - - ctx := context.Background() - runUuid, err := uuid.NewRandom() - fatalIf(err) - runId := runUuid.String() - client, err := metadata.NewClient(testMlmdServerAddress, testMlmdServerPort, &tls.Config{}) - fatalIf(err) - - pipeline, err := client.GetPipeline(ctx, "get-pipeline-test", runId, namespace, runResource, pipelineRoot, "") - fatalIf(err) - // The second call to GetPipeline won't fail because it avoid inserting to MLMD again. - samePipeline, err := client.GetPipeline(ctx, "get-pipeline-test", runId, namespace, runResource, pipelineRoot, "") - fatalIf(err) - if pipeline.GetCtxID() != samePipeline.GetCtxID() { - t.Errorf("Expect pipeline context ID %d, actual is %d", pipeline.GetCtxID(), samePipeline.GetCtxID()) - } -} - -func Test_GetPipelineFromExecution(t *testing.T) { - t.Skip("Temporarily disable the test that requires cluster connection.") - - fatalIf := func(err error) { - if err != nil { - debug.PrintStack() - t.Fatal(err) - } - } - client := newLocalClientOrFatal(t) - ctx := context.Background() - pipeline, err := client.GetPipeline(ctx, "get-pipeline-from-execution", newUUIDOrFatal(t), "kubeflow", "workflow/abc", "gs://my-bucket/root", "") - fatalIf(err) - execution, err := client.CreateExecution(ctx, pipeline, &metadata.ExecutionConfig{ - TaskName: "task1", - ExecutionType: metadata.ContainerExecutionTypeName, - }) - fatalIf(err) - gotPipeline, err := client.GetPipelineFromExecution(ctx, execution.GetID()) - fatalIf(err) - if gotPipeline.GetRunCtxID() != pipeline.GetRunCtxID() { - t.Errorf("client.GetPipelineFromExecution(id=%v)=Pipeline(runCtxID=%v), expect Pipeline(runCtxID=%v)", execution.GetID(), gotPipeline.GetRunCtxID(), pipeline.GetRunCtxID()) - } -} - -func Test_GetPipelineConcurrently(t *testing.T) { - t.Skip("Temporarily disable the test that requires cluster connection.") - - // This test depends on a MLMD grpc server running at localhost:8080. - client, err := metadata.NewClient("localhost", "8080", &tls.Config{}) - if err != nil { - t.Fatal(err) - } - runId, err := uuid.NewRandom() - if err != nil { - t.Fatal(err) - } - runIdText := runId.String() - var wg sync.WaitGroup - ctx := context.Background() - // Simulates 5 concurrent tasks trying to create the same pipeline contexts. - for i := 0; i < 5; i++ { - wg.Add(1) - go func() { - defer wg.Done() - _, err := client.GetPipeline(ctx, fmt.Sprintf("get-pipeline-concurrently-test-%s", runIdText), runIdText, namespace, "workflows.argoproj.io/hello-world-"+runIdText, pipelineRoot, "") - if err != nil { - t.Error(err) - } - }() - } - wg.Wait() - // Then another 5 concurrent tasks. 
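The deleted concurrency test above fans five goroutines out over the same GetPipeline call with a sync.WaitGroup, and the second batch below repeats it to exercise the already-created path. The fan-out scaffolding reduces to this standard shape; `fanOut` is a placeholder name and `fn` stands in for the `client.GetPipeline` call under test:

```go
package sketch

import "sync"

// fanOut runs fn n times concurrently and collects per-goroutine errors,
// mirroring the WaitGroup pattern in the deleted test.
func fanOut(n int, fn func(i int) error) []error {
	errs := make([]error, n)
	var wg sync.WaitGroup
	for i := 0; i < n; i++ {
		wg.Add(1)
		go func(i int) {
			defer wg.Done()
			errs[i] = fn(i) // each goroutine writes only its own slot
		}(i)
	}
	wg.Wait()
	return errs
}
```

Collecting each goroutine's error into its own slice slot avoids a mutex, and running the batch twice checks that context creation is idempotent, not merely race-tolerant.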
- for i := 0; i < 5; i++ { - wg.Add(1) - go func() { - defer wg.Done() - _, err := client.GetPipeline(ctx, fmt.Sprintf("get-pipeline-concurrently-test-%s", runIdText), runIdText, namespace, "workflows.argoproj.io/hello-world-"+runIdText, pipelineRoot, "") - if err != nil { - t.Error(err) - } - }() - } - wg.Wait() -} - -func Test_GenerateOutputURI(t *testing.T) { - // Const define the artifact name - const ( - pipelineName = "my-pipeline-name" - runID = "my-run-id" - pipelineRoot = "minio://mlpipeline/v2/artifacts" - pipelineRootQuery = "?query=string&another=query" - ) - tests := []struct { - name string - queryString string - paths []string - preserveQueryString bool - want string - }{ - { - name: "plain pipeline root without preserveQueryString", - queryString: "", - paths: []string{pipelineName, runID}, - preserveQueryString: false, - want: fmt.Sprintf("%s/%s/%s", pipelineRoot, pipelineName, runID), - }, - { - name: "plain pipeline root with preserveQueryString", - queryString: "", - paths: []string{pipelineName, runID}, - preserveQueryString: true, - want: fmt.Sprintf("%s/%s/%s", pipelineRoot, pipelineName, runID), - }, - { - name: "pipeline root with query string without preserveQueryString", - queryString: pipelineRootQuery, - paths: []string{pipelineName, runID}, - preserveQueryString: false, - want: fmt.Sprintf("%s/%s/%s", pipelineRoot, pipelineName, runID), - }, - { - name: "pipeline root with query string with preserveQueryString", - queryString: pipelineRootQuery, - paths: []string{pipelineName, runID}, - preserveQueryString: true, - want: fmt.Sprintf("%s/%s/%s%s", pipelineRoot, pipelineName, runID, pipelineRootQuery), - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - got := metadata.GenerateOutputURI(fmt.Sprintf("%s%s", pipelineRoot, tt.queryString), tt.paths, tt.preserveQueryString) - if diff := cmp.Diff(got, tt.want); diff != "" { - t.Errorf("GenerateOutputURI() = %v, want %v\nDiff (-want, +got)\n%s", got, tt.want, diff) - } - }) - } -} - -func Test_DAG(t *testing.T) { - t.Skip("Temporarily disable the test that requires cluster connection.") - - client := newLocalClientOrFatal(t) - ctx := context.Background() - // These parameters do not matter. 
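The deleted Test_DAG that resumes below registers a root DAG, a nested DAG (task1) with one container child, and a sibling container task (task2), then lists children level by level via GetExecutionsInDAG. The relation it verifies is a grouping of executions by parent DAG; here is a toy model under assumed field names, where `Execution` is a stand-in for the metadata package's type, not the real one:

```go
package sketch

// Execution models only the fields the DAG-listing check needs.
type Execution struct {
	ID        int64
	TaskName  string
	ParentDAG int64
}

// childrenByDAG groups executions under their parent DAG, which is the
// per-level relation the deleted test asserts on.
func childrenByDAG(all []Execution) map[int64]map[string]Execution {
	out := make(map[int64]map[string]Execution)
	for _, e := range all {
		if out[e.ParentDAG] == nil {
			out[e.ParentDAG] = make(map[string]Execution)
		}
		out[e.ParentDAG][e.TaskName] = e
	}
	return out
}
```

Under that model, the root's children are `childrenByDAG(all)[rootID]`, which is why the test expects exactly two entries under root and one under task1.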
- pipeline, err := client.GetPipeline(ctx, "pipeline-name", newUUIDOrFatal(t), "ns1", "workflow/pipeline-1234", pipelineRoot, "") - if err != nil { - t.Fatal(err) - } - root, err := client.CreateExecution(ctx, pipeline, &metadata.ExecutionConfig{ - TaskName: "root", - ExecutionType: metadata.DagExecutionTypeName, - ParentDagID: 0, // this is root DAG - }) - if err != nil { - t.Fatal(err) - } - task1DAG, err := client.CreateExecution(ctx, pipeline, &metadata.ExecutionConfig{ - TaskName: "task1", - ExecutionType: metadata.DagExecutionTypeName, - ParentDagID: root.GetID(), - }) - if err != nil { - t.Fatal(err) - } - task1ChildA, err := client.CreateExecution(ctx, pipeline, &metadata.ExecutionConfig{ - TaskName: "task1ChildA", - ExecutionType: metadata.ContainerExecutionTypeName, - ParentDagID: task1DAG.GetID(), - }) - if err != nil { - t.Fatal(err) - } - task2, err := client.CreateExecution(ctx, pipeline, &metadata.ExecutionConfig{ - TaskName: "task2", - ExecutionType: metadata.ContainerExecutionTypeName, - ParentDagID: root.GetID(), - }) - if err != nil { - t.Fatal(err) - } - rootDAG := &metadata.DAG{Execution: root} - rootChildren, err := client.GetExecutionsInDAG(ctx, rootDAG, pipeline, true) - if err != nil { - t.Fatal(err) - } - if len(rootChildren) != 2 { - t.Errorf("len(rootChildren)=%v, expect 2", len(rootChildren)) - } - if rootChildren["task1"].GetID() != task1DAG.GetID() { - t.Errorf("executions[\"task1\"].GetID()=%v, task1.GetID()=%v. Not equal", rootChildren["task1"].GetID(), task1DAG.GetID()) - } - if rootChildren["task2"].GetID() != task2.GetID() { - t.Errorf("executions[\"task2\"].GetID()=%v, task2.GetID()=%v. Not equal", rootChildren["task2"].GetID(), task2.GetID()) - } - task1Children, err := client.GetExecutionsInDAG(ctx, &metadata.DAG{Execution: task1DAG}, pipeline, true) - if len(task1Children) != 1 { - t.Errorf("len(task1Children)=%v, expect 1", len(task1Children)) - } - if task1Children["task1ChildA"].GetID() != task1ChildA.GetID() { - t.Errorf("executions[\"task1ChildA\"].GetID()=%v, task1ChildA.GetID()=%v. 
Not equal", task1Children["task1ChildA"].GetID(), task1ChildA.GetID()) - } -} - -func newLocalClientOrFatal(t *testing.T) *metadata.Client { - t.Helper() - client, err := metadata.NewClient("localhost", "8080", &tls.Config{}) - if err != nil { - t.Fatalf("metadata.NewClient failed: %v", err) - } - return client -} - -func newUUIDOrFatal(t *testing.T) string { - t.Helper() - uuid, err := uuid.NewRandom() - if err != nil { - t.Fatalf("uuid.NewRandom failed: %v", err) - } - return uuid.String() -} diff --git a/backend/src/v2/metadata/converter.go b/backend/src/v2/metadata/converter.go deleted file mode 100644 index 202c5c6b130..00000000000 --- a/backend/src/v2/metadata/converter.go +++ /dev/null @@ -1,216 +0,0 @@ -package metadata - -import ( - "encoding/json" - "fmt" - "strconv" - "strings" - - "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" - pb "github.com/kubeflow/pipelines/third_party/ml-metadata/go/ml_metadata" - "google.golang.org/protobuf/encoding/protojson" - "google.golang.org/protobuf/types/known/structpb" -) - -func PbValueToText(v *structpb.Value) (string, error) { - wrap := func(err error) error { - return fmt.Errorf("failed to convert protobuf.Value to text: %w", err) - } - if v == nil { - return "", nil - } - var text string - switch t := v.Kind.(type) { - case *structpb.Value_NullValue: - text = "" - case *structpb.Value_StringValue: - text = v.GetStringValue() - case *structpb.Value_NumberValue: - text = strconv.FormatFloat(v.GetNumberValue(), 'f', -1, 64) - case *structpb.Value_BoolValue: - text = strconv.FormatBool(v.GetBoolValue()) - case *structpb.Value_ListValue: - b, err := json.Marshal(v.GetListValue()) - if err != nil { - return "", wrap(fmt.Errorf("failed to JSON-marshal a list: %w", err)) - } - text = string(b) - case *structpb.Value_StructValue: - b, err := json.Marshal(v.GetStructValue()) - if err != nil { - return "", wrap(fmt.Errorf("failed to JSON-marshal a struct: %w", err)) - } - text = string(b) - default: - return "", wrap(fmt.Errorf("unknown type %T", t)) - } - return text, nil -} - -func TextToPbValue(text string, t pipelinespec.ParameterType_ParameterTypeEnum) (*structpb.Value, error) { - msg := func(err error) error { - return fmt.Errorf("TextToPbValue(text=%q, type=%q) failed: %w", text, t, err) - } - switch t { - case pipelinespec.ParameterType_STRING: - return structpb.NewStringValue(text), nil - case pipelinespec.ParameterType_NUMBER_INTEGER: - i, err := strconv.ParseInt(strings.TrimSpace(text), 10, 0) - if err != nil { - return nil, msg(err) - } - return structpb.NewNumberValue(float64(i)), nil - case pipelinespec.ParameterType_NUMBER_DOUBLE: - f, err := strconv.ParseFloat(strings.TrimSpace(text), 0) - if err != nil { - return nil, msg(err) - } - return structpb.NewNumberValue(f), nil - case pipelinespec.ParameterType_BOOLEAN: - v, err := strconv.ParseBool(strings.TrimSpace(text)) - if err != nil { - return nil, msg(err) - } - return structpb.NewBoolValue(v), nil - case pipelinespec.ParameterType_LIST: - v := &structpb.Value{} - if err := v.UnmarshalJSON([]byte(text)); err != nil { - return nil, msg(err) - } - if _, ok := v.GetKind().(*structpb.Value_ListValue); !ok { - return nil, msg(fmt.Errorf("unexpected type")) - } - return v, nil - case pipelinespec.ParameterType_STRUCT: - v := &structpb.Value{} - if err := v.UnmarshalJSON([]byte(text)); err != nil { - return nil, msg(err) - } - if _, ok := v.GetKind().(*structpb.Value_StructValue); !ok { - return nil, msg(fmt.Errorf("unexpected type")) - } - return v, nil - default: - return 
nil, msg(fmt.Errorf("unknown type. Expected STRING, NUMBER_INTEGER, NUMBER_DOUBLE, BOOLEAN, LIST or STRUCT")) - } -} - -func stringMLMDValue(v string) *pb.Value { - return &pb.Value{Value: &pb.Value_StringValue{StringValue: v}} -} - -func doubleMLMDValue(v float64) *pb.Value { - return &pb.Value{Value: &pb.Value_DoubleValue{DoubleValue: v}} -} - -func intMLMDValue(v int64) *pb.Value { - return &pb.Value{Value: &pb.Value_IntValue{IntValue: v}} -} - -func toMLMDArtifact(runtimeArtifact *pipelinespec.RuntimeArtifact) (*pb.Artifact, error) { - errorF := func(err error) error { - return fmt.Errorf("failed to convert RuntimeArtifact to MLMD artifact: %w", err) - } - artifact := &pb.Artifact{ - Uri: &runtimeArtifact.Uri, - Properties: make(map[string]*pb.Value), - CustomProperties: make(map[string]*pb.Value), - } - - if runtimeArtifact.Metadata != nil { - for k, v := range runtimeArtifact.Metadata.Fields { - value, err := StructValueToMLMDValue(v) - if err != nil { - return nil, errorF(err) - } - artifact.CustomProperties[k] = value - } - } - - return artifact, nil -} - -func StructValueToMLMDValue(v *structpb.Value) (*pb.Value, error) { - boolToInt := func(b bool) int64 { - if b { - return 1 - } - return 0 - } - - switch t := v.Kind.(type) { - case *structpb.Value_StringValue: - return stringMLMDValue(v.GetStringValue()), nil - case *structpb.Value_NumberValue: - return doubleMLMDValue(v.GetNumberValue()), nil - case *structpb.Value_BoolValue: - return intMLMDValue(boolToInt(v.GetBoolValue())), nil - case *structpb.Value_ListValue: - return &pb.Value{ - Value: &pb.Value_StructValue{ - StructValue: &structpb.Struct{ - Fields: map[string]*structpb.Value{ - "list": {Kind: &structpb.Value_ListValue{ListValue: v.GetListValue()}}}}}, - }, nil - case *structpb.Value_StructValue: - return &pb.Value{ - Value: &pb.Value_StructValue{ - StructValue: &structpb.Struct{ - Fields: map[string]*structpb.Value{ - "struct": {Kind: &structpb.Value_StructValue{StructValue: v.GetStructValue()}}}}}, - }, nil - // TODO: support null - default: - return nil, fmt.Errorf("unknown/unsupported value type %T", t) - } -} - -func UnmarshalRuntimeArtifact(bytes []byte) (*pipelinespec.RuntimeArtifact, error) { - a := &pb.Artifact{} - if err := protojson.Unmarshal(bytes, a); err != nil { - return nil, fmt.Errorf("failed to unmarshall runtime artifact metadata: %w", err) - } - return toRuntimeArtifact(a) -} - -func toRuntimeArtifact(artifact *pb.Artifact) (*pipelinespec.RuntimeArtifact, error) { - errorF := func(err error) (*pipelinespec.RuntimeArtifact, error) { - return nil, fmt.Errorf("failed to convert MLMD artifact to RuntimeArtifact: %w", err) - } - - rta := &pipelinespec.RuntimeArtifact{ - Name: strconv.FormatInt(artifact.GetId(), 10), - Uri: artifact.GetUri(), - Metadata: &structpb.Struct{ - Fields: make(map[string]*structpb.Value), - }, - } - - propertiesToMetadata := func(properties map[string]*pb.Value) error { - for k, p := range properties { - value := &structpb.Value{} - switch t := p.Value.(type) { - case *pb.Value_StringValue: - value.Kind = &structpb.Value_StringValue{StringValue: p.GetStringValue()} - case *pb.Value_DoubleValue: - value.Kind = &structpb.Value_NumberValue{NumberValue: p.GetDoubleValue()} - case *pb.Value_IntValue: - value.Kind = &structpb.Value_NumberValue{NumberValue: float64(p.GetIntValue())} - case *pb.Value_StructValue: - value.Kind = &structpb.Value_StructValue{StructValue: p.GetStructValue()} - default: - return fmt.Errorf("unknown property type in MLMD artifact: %T", t) - } - 
rta.Metadata.Fields[k] = value - } - return nil - } - if err := propertiesToMetadata(artifact.Properties); err != nil { - return errorF(err) - } - if err := propertiesToMetadata(artifact.CustomProperties); err != nil { - return errorF(err) - } - - return rta, nil -} diff --git a/backend/src/v2/metadata/env.go b/backend/src/v2/metadata/env.go deleted file mode 100644 index 86481ef4ffe..00000000000 --- a/backend/src/v2/metadata/env.go +++ /dev/null @@ -1,18 +0,0 @@ -package metadata - -const ( - metadataGrpcServiceAddress = "metadata-grpc-service.kubeflow" - metadataGrpcServicePort = "8080" -) - -type ServerConfig struct { - Address string - Port string -} - -func DefaultConfig() *ServerConfig { - return &ServerConfig{ - Address: metadataGrpcServiceAddress, - Port: metadataGrpcServicePort, - } -} diff --git a/backend/src/v2/metadata/model.go b/backend/src/v2/metadata/model.go deleted file mode 100644 index a325b034192..00000000000 --- a/backend/src/v2/metadata/model.go +++ /dev/null @@ -1,54 +0,0 @@ -// Copyright 2021 The Kubeflow Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Package metadata contains types to record/retrieve metadata stored in MLMD -// for individual pipeline steps. -package metadata - -import ( - "fmt" - - pb "github.com/kubeflow/pipelines/third_party/ml-metadata/go/ml_metadata" - "google.golang.org/protobuf/types/known/structpb" -) - -// A hacky way to get Execution from pb.Execution, usually you should get -// an Execution from this metadata package directly without using ml_metadata.Execution -func NewExecution(e *pb.Execution) *Execution { - return &Execution{execution: e} -} - -func (e *Execution) GetParameters() (inputs, outputs map[string]*structpb.Value, err error) { - inputs = make(map[string]*structpb.Value) - outputs = make(map[string]*structpb.Value) - defer func() { - if err != nil { - err = fmt.Errorf("execution(ID=%v).GetParameters failed: %w", e.GetID(), err) - } - }() - if e == nil || e.execution == nil { - return nil, nil, nil - } - if stored_inputs, ok := e.execution.CustomProperties[keyInputs]; ok { - for name, value := range stored_inputs.GetStructValue().GetFields() { - inputs[name] = value - } - } - if stored_outputs, ok := e.execution.CustomProperties[keyOutputs]; ok { - for name, value := range stored_outputs.GetStructValue().GetFields() { - outputs[name] = value - } - } - return inputs, outputs, nil -} diff --git a/backend/src/v2/metadata/testutils/test_utils.go b/backend/src/v2/metadata/testutils/test_utils.go deleted file mode 100644 index 54ba9950cf5..00000000000 --- a/backend/src/v2/metadata/testutils/test_utils.go +++ /dev/null @@ -1,30 +0,0 @@ -package testutils - -import ( - "fmt" - - "github.com/kubeflow/pipelines/backend/test/v2" - pb "github.com/kubeflow/pipelines/third_party/ml-metadata/go/ml_metadata" - "google.golang.org/grpc" - "google.golang.org/grpc/credentials" - "google.golang.org/grpc/credentials/insecure" -) - -func NewTestMlmdClient(testMlmdServerAddress string, testMlmdServerPort string, tlsEnabled 
bool, caCertPath string) (pb.MetadataStoreServiceClient, error) { - creds := insecure.NewCredentials() - if tlsEnabled { - tlsCfg, err := test.GetTLSConfig(caCertPath) - if err != nil { - return nil, err - } - creds = credentials.NewTLS(tlsCfg) - } - dialOption := grpc.WithTransportCredentials(creds) - conn, err := grpc.Dial(fmt.Sprintf("%s:%s", testMlmdServerAddress, testMlmdServerPort), - dialOption, - ) - if err != nil { - return nil, fmt.Errorf("NewMlmdClient() failed: %w", err) - } - return pb.NewMetadataStoreServiceClient(conn), nil -} diff --git a/backend/src/v2/objectstore/config.go b/backend/src/v2/objectstore/config.go index 92d780b67ef..d22791c24cd 100644 --- a/backend/src/v2/objectstore/config.go +++ b/backend/src/v2/objectstore/config.go @@ -12,19 +12,21 @@ // See the License for the specific language governing permissions and // limitations under the License. -// This package contains helper methods for using object stores. +// Package objectstore contains helper methods for using object stores. package objectstore import ( - "encoding/json" + "crypto/sha256" + "encoding/hex" "fmt" + "net/url" "path" "regexp" "strconv" "strings" ) -// The endpoint uses Kubernetes service DNS name with namespace: +// DefaultMinioEndpointInMultiUserMode uses Kubernetes service DNS name with namespace: // https://kubernetes.io/docs/concepts/services-networking/service/#dns const DefaultMinioEndpointInMultiUserMode = "minio-service.kubeflow:9000" @@ -33,7 +35,6 @@ type Config struct { BucketName string Prefix string QueryString string - SessionInfo *SessionInfo } type SessionInfo struct { @@ -73,7 +74,7 @@ func (b *Config) bucketURL() string { } } - u = u + q + u += q return u } @@ -81,35 +82,17 @@ func (b *Config) PrefixedBucket() string { return b.Scheme + path.Join(b.BucketName, b.Prefix) } -func (b *Config) KeyFromURI(uri string) (string, error) { - prefixedBucket := b.PrefixedBucket() - if !strings.HasPrefix(uri, prefixedBucket) { - return "", fmt.Errorf("URI %q does not have expected bucket prefix %q", uri, prefixedBucket) - } - - key := strings.TrimLeft(strings.TrimPrefix(uri, prefixedBucket), "/") - if len(key) == 0 { - return "", fmt.Errorf("URI %q has empty key given prefixed bucket %q", uri, prefixedBucket) - } - return key, nil -} - -func (b *Config) UriFromKey(blobKey string) string { - return b.Scheme + path.Join(b.BucketName, b.Prefix, blobKey) +func (b *Config) Hash() string { + h := sha256.New() + h.Write([]byte(b.Scheme)) + h.Write([]byte(b.BucketName)) + h.Write([]byte(b.Prefix)) + h.Write([]byte(b.QueryString)) + return hex.EncodeToString(h.Sum(nil)) } var bucketPattern = regexp.MustCompile(`(^[a-z][a-z0-9]+:///?)([^/?]+)(/[^?]*)?(\?.+)?$`) -func ParseBucketConfig(path string, sess *SessionInfo) (*Config, error) { - config, err := ParseBucketPathToConfig(path) - if err != nil { - return nil, err - } - config.SessionInfo = sess - - return config, nil -} - func ParseBucketPathToConfig(path string) (*Config, error) { ms := bucketPattern.FindStringSubmatch(path) if ms == nil || len(ms) != 5 { @@ -123,7 +106,7 @@ func ParseBucketPathToConfig(path string) (*Config, error) { prefix := strings.TrimPrefix(ms[3], "/") if len(prefix) > 0 && !strings.HasSuffix(prefix, "/") { - prefix = prefix + "/" + prefix += "/" } return &Config{ @@ -134,21 +117,25 @@ func ParseBucketPathToConfig(path string) (*Config, error) { }, nil } -func ParseBucketConfigForArtifactURI(uri string) (*Config, error) { - ms := bucketPattern.FindStringSubmatch(uri) - if ms == nil || len(ms) != 5 { - return nil, 
fmt.Errorf("parse bucket config failed: unrecognized uri format: %q", uri) +func SplitObjectURI(uri string) (prefix, base string, err error) { + u, err := url.Parse(uri) + if err != nil { + return "", "", fmt.Errorf("invalid URI: %w", err) } - // TODO: Verify/add support for file:///. - if ms[1] != "gs://" && ms[1] != "s3://" && ms[1] != "minio://" && ms[1] != "mem://" { - return nil, fmt.Errorf("parse bucket config failed: unsupported Cloud bucket: %q", uri) - } + // Trim trailing slash (if any) + cleanPath := strings.TrimSuffix(u.Path, "/") - return &Config{ - Scheme: ms[1], - BucketName: ms[2], - }, nil + // Get base name and dir prefix + base = path.Base(cleanPath) + dir := path.Dir(cleanPath) + + // Reconstruct prefix (scheme + host + dir) + prefix = fmt.Sprintf("%s://%s", u.Scheme, u.Host) + if dir != "." && dir != "/" { + prefix += dir + } + return prefix, base, nil } // ParseProviderFromPath prases the uri and returns the scheme, which is @@ -161,18 +148,6 @@ func ParseProviderFromPath(uri string) (string, error) { return strings.TrimSuffix(bucketConfig.Scheme, "://"), nil } -func GetSessionInfoFromString(sessionInfoJSON string) (*SessionInfo, error) { - sessionInfo := &SessionInfo{} - if sessionInfoJSON == "" { - return nil, nil - } - err := json.Unmarshal([]byte(sessionInfoJSON), sessionInfo) - if err != nil { - return nil, fmt.Errorf("Encountered error when attempting to unmarshall bucket session info properties: %w", err) - } - return sessionInfo, nil -} - func StructuredS3Params(p map[string]string) (*S3Params, error) { sparams := &S3Params{} if val, ok := p["fromEnv"]; ok { diff --git a/backend/src/v2/objectstore/config_test.go b/backend/src/v2/objectstore/config_test.go new file mode 100644 index 00000000000..5f2329ca628 --- /dev/null +++ b/backend/src/v2/objectstore/config_test.go @@ -0,0 +1,16 @@ +package objectstore + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func Test_ParseBucketPathToConfig(t *testing.T) { + pipelineRoot := "s3://mlpipeline/v2/artifacts/" + result, err := ParseBucketPathToConfig(pipelineRoot) + require.NoError(t, err) + require.Equal(t, "s3://", result.Scheme) + require.Equal(t, "v2/artifacts/", result.Prefix) + require.Equal(t, "mlpipeline", result.BucketName) +} diff --git a/backend/src/v2/objectstore/object_store.go b/backend/src/v2/objectstore/object_store.go index 386d45acfb1..193800be918 100644 --- a/backend/src/v2/objectstore/object_store.go +++ b/backend/src/v2/objectstore/object_store.go @@ -31,7 +31,6 @@ import ( "github.com/golang/glog" "gocloud.dev/blob" "gocloud.dev/blob/gcsblob" - _ "gocloud.dev/blob/gcsblob" "gocloud.dev/blob/s3blob" "gocloud.dev/gcp" "golang.org/x/oauth2/google" @@ -39,18 +38,24 @@ import ( "k8s.io/client-go/kubernetes" ) -func OpenBucket(ctx context.Context, k8sClient kubernetes.Interface, namespace string, config *Config) (bucket *blob.Bucket, err error) { +func OpenBucket( + ctx context.Context, + k8sClient kubernetes.Interface, + namespace string, + config *Config, + sessionInfo *SessionInfo, +) (bucket *blob.Bucket, err error) { defer func() { if err != nil { err = fmt.Errorf("Failed to open bucket %q: %w", config.BucketName, err) } }() - if config.SessionInfo != nil { - switch config.SessionInfo.Provider { + if sessionInfo != nil { + switch sessionInfo.Provider { case "minio", "s3": - s3Client, err1 := createS3BucketSession(ctx, namespace, config.SessionInfo, k8sClient) + s3Client, err1 := createS3BucketSession(ctx, namespace, sessionInfo, k8sClient) if err1 != nil { - return nil, 
fmt.Errorf("Failed to retrieve credentials for bucket %s: %w", config.BucketName, err1) + return nil, fmt.Errorf("failed to retrieve credentials for bucket %s: %w", config.BucketName, err1) } if s3Client != nil { // Use s3blob.OpenBucketV2 with the configured S3 client to leverage retry logic @@ -63,7 +68,7 @@ func OpenBucket(ctx context.Context, k8sClient kubernetes.Interface, namespace s return blob.PrefixedBucket(openedBucket, config.Prefix), nil } case "gs": - client, err1 := getGCSTokenClient(ctx, namespace, config.SessionInfo, k8sClient) + client, err1 := getGCSTokenClient(ctx, namespace, sessionInfo, k8sClient) if err1 != nil { return nil, err1 } @@ -126,6 +131,7 @@ func UploadBlob(ctx context.Context, bucket *blob.Bucket, localPath, blobPath st func DownloadBlob(ctx context.Context, bucket *blob.Bucket, localDir, blobDir string) error { iter := bucket.List(&blob.ListOptions{Prefix: blobDir}) + downloadedBlob := false for { obj, err := iter.Next(ctx) if err != nil { @@ -148,8 +154,12 @@ func DownloadBlob(ctx context.Context, bucket *blob.Bucket, localDir, blobDir st if err := downloadFile(ctx, bucket, obj.Key, filepath.Join(localDir, relativePath)); err != nil { return err } + downloadedBlob = true } } + if !downloadedBlob { + return fmt.Errorf("no blob found in remote storage %q", blobDir) + } return nil } @@ -228,11 +238,11 @@ func getGCSTokenClient(ctx context.Context, namespace string, sessionInfo *Sessi if err != nil { return nil, err } - tokenJson, ok := secret.Data[params.TokenKey] - if !ok || len(tokenJson) == 0 { + tokenJSON, ok := secret.Data[params.TokenKey] + if !ok || len(tokenJSON) == 0 { return nil, fmt.Errorf("key '%s' not found or is empty", params.TokenKey) } - creds, err := google.CredentialsFromJSON(ctx, tokenJson, "https://www.googleapis.com/auth/devstorage.read_write") + creds, err := google.CredentialsFromJSON(ctx, tokenJSON, "https://www.googleapis.com/auth/devstorage.read_write") if err != nil { return nil, err } @@ -297,7 +307,7 @@ func createS3BucketSession(ctx context.Context, namespace string, sessionInfo *S } s3Client := s3.NewFromConfig(s3Config, s3Options) if s3Client == nil { - return nil, fmt.Errorf("Failed to create object store session, %v", err) + return nil, fmt.Errorf("failed to create object store session, %v", err) } return s3Client, nil } @@ -313,7 +323,7 @@ func getS3BucketCredential( defer func() { if err != nil { // wrap error before returning - err = fmt.Errorf("Failed to get Bucket credentials from secret name=%q namespace=%q: %w", secretName, namespace, err) + err = fmt.Errorf("failed to get bucket credentials from secret name=%q namespace=%q: %w", secretName, namespace, err) } }() secret, err := clientSet.CoreV1().Secrets(namespace).Get( diff --git a/backend/src/v2/objectstore/object_store_test.go b/backend/src/v2/objectstore/object_store_test.go index 42c7a779b0f..208a56a0850 100644 --- a/backend/src/v2/objectstore/object_store_test.go +++ b/backend/src/v2/objectstore/object_store_test.go @@ -17,7 +17,6 @@ package objectstore import ( "context" "fmt" - "reflect" "testing" "github.com/stretchr/testify/assert" @@ -28,179 +27,6 @@ import ( _ "gocloud.dev/blob/gcsblob" ) -func Test_parseCloudBucket(t *testing.T) { - tests := []struct { - name string - path string - want *Config - wantErr bool - }{ - { - name: "Parses GCS - Just the bucket", - path: "gs://my-bucket", - want: &Config{ - Scheme: "gs://", - BucketName: "my-bucket", - Prefix: "", - }, - wantErr: false, - }, - { - name: "Parses GCS - Just the bucket with trailing slash", - 
path: "gs://my-bucket/", - want: &Config{ - Scheme: "gs://", - BucketName: "my-bucket", - Prefix: "", - }, - wantErr: false, - }, - { - name: "Parses GCS - Bucket with prefix", - path: "gs://my-bucket/my-path", - want: &Config{ - Scheme: "gs://", - BucketName: "my-bucket", - Prefix: "my-path/", - }, - wantErr: false, - }, - { - name: "Parses GCS - Bucket with prefix and trailing slash", - path: "gs://my-bucket/my-path/", - want: &Config{ - Scheme: "gs://", - BucketName: "my-bucket", - Prefix: "my-path/", - }, - wantErr: false, - }, - { - name: "Parses GCS - Bucket with multiple path components in prefix", - path: "gs://my-bucket/my-path/123", - want: &Config{ - Scheme: "gs://", - BucketName: "my-bucket", - Prefix: "my-path/123/", - }, - wantErr: false, - }, - { - name: "Parses GCS - Bucket with multiple path components in prefix and trailing slash", - path: "gs://my-bucket/my-path/123/", - want: &Config{ - Scheme: "gs://", - BucketName: "my-bucket", - Prefix: "my-path/123/", - }, - wantErr: false, - }, - { - name: "Parses Minio - Bucket with query string", - path: "minio://my-bucket", - want: &Config{ - Scheme: "minio://", - BucketName: "my-bucket", - Prefix: "", - QueryString: "", - }, - wantErr: false, - }, { - name: "Parses Minio - Bucket with prefix", - path: "minio://my-bucket/my-path", - want: &Config{ - Scheme: "minio://", - BucketName: "my-bucket", - Prefix: "my-path/", - QueryString: "", - }, - wantErr: false, - }, { - name: "Parses Minio - Bucket with multiple path components in prefix", - path: "minio://my-bucket/my-path/123", - want: &Config{ - Scheme: "minio://", - BucketName: "my-bucket", - Prefix: "my-path/123/", - QueryString: "", - }, - wantErr: false, - }, { - name: "Parses S3 - Bucket with session", - path: "s3://my-bucket/my-path/123", - want: &Config{ - Scheme: "s3://", - BucketName: "my-bucket", - Prefix: "my-path/123/", - QueryString: "", - SessionInfo: &SessionInfo{ - Provider: "s3", - Params: map[string]string{ - "region": "us-east-1", - "endpoint": "s3.amazonaws.com", - "disableSSL": "false", - "fromEnv": "false", - "secretName": "s3-testsecret", - "accessKeyKey": "s3-testaccessKeyKey", - "secretKeyKey": "s3-testsecretKeyKey", - }, - }, - }, - wantErr: false, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - got, err := ParseBucketConfig(tt.path, tt.want.SessionInfo) - if (err != nil) != tt.wantErr { - t.Errorf("%q: parseCloudBucket() error = %v, wantErr %v", tt.name, err, tt.wantErr) - return - } - if !reflect.DeepEqual(got, tt.want) { - t.Errorf("%q: parseCloudBucket() = %v, want %v", tt.name, got, tt.want) - } - assert.Equal(t, got.SessionInfo, tt.want.SessionInfo) - }) - } -} - -func Test_bucketConfig_KeyFromURI(t *testing.T) { - tests := []struct { - name string - bucketConfig *Config - uri string - want string - wantErr bool - }{ - { - name: "Bucket with empty prefix", - bucketConfig: &Config{Scheme: "gs://", BucketName: "my-bucket", Prefix: ""}, - uri: "gs://my-bucket/path1/path2", - want: "path1/path2", - wantErr: false, - }, - { - name: "Bucket with non-empty Prefix ", - bucketConfig: &Config{Scheme: "gs://", BucketName: "my-bucket", Prefix: "path0/"}, - uri: "gs://my-bucket/path0/path1/path2", - want: "path1/path2", - wantErr: false, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - got, err := tt.bucketConfig.KeyFromURI(tt.uri) - if (err != nil) != tt.wantErr { - t.Errorf("%q: buckerConfig.keyFromURI() error = %v, wantErr %v", tt.name, err, tt.wantErr) - return - } - if got != tt.want { - 
t.Errorf("bucketConfig.keyFromURI() = %v, want %v", got, tt.want) - } - }) - } -} - func Test_createS3BucketSession(t *testing.T) { tt := []struct { msg string diff --git a/backend/test/end2end/utils/e2e_utils.go b/backend/test/end2end/utils/e2e_utils.go index 09526501700..9a7721e0123 100644 --- a/backend/test/end2end/utils/e2e_utils.go +++ b/backend/test/end2end/utils/e2e_utils.go @@ -14,6 +14,7 @@ import ( "github.com/kubeflow/pipelines/backend/test/logger" "github.com/kubeflow/pipelines/backend/test/testutil" apitests "github.com/kubeflow/pipelines/backend/test/v2/api" + "github.com/onsi/ginkgo/v2/dsl/core" "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" "github.com/onsi/ginkgo/v2" @@ -61,6 +62,7 @@ func CreatePipelineRunAndWaitForItToFinish(runClient *apiserver.RunClient, testC logger.Log("Create run for pipeline with id: '%s' and name: '%s'", pipelineID, pipelineDisplayName) uploadedPipelineRun := CreatePipelineRun(runClient, testContext, &pipelineID, pipelineVersionID, experimentID, runTimeParams) logger.Log("Created Pipeline Run with id: %s for pipeline with id: %s", uploadedPipelineRun.RunID, pipelineID) + core.GinkgoWriter.Println(fmt.Sprintf("Created Pipeline Run with id: %s for pipeline with id: %s", uploadedPipelineRun.RunID, pipelineID)) timeout := time.Duration(maxPipelineWaitTime) testutil.WaitForRunToBeInState(runClient, &uploadedPipelineRun.RunID, []run_model.V2beta1RuntimeState{run_model.V2beta1RuntimeStateSUCCEEDED, run_model.V2beta1RuntimeStateSKIPPED, run_model.V2beta1RuntimeStateFAILED, run_model.V2beta1RuntimeStateCANCELED}, &timeout) return uploadedPipelineRun.RunID @@ -70,25 +72,16 @@ func CreatePipelineRunAndWaitForItToFinish(runClient *apiserver.RunClient, testC func ValidateComponentStatuses(runClient *apiserver.RunClient, k8Client *kubernetes.Clientset, testContext *apitests.TestContext, runID string, compiledWorkflow *v1alpha1.Workflow) { logger.Log("Fetching updated pipeline run details for run with id=%s", runID) updatedRun := testutil.GetPipelineRun(runClient, &runID) - actualTaskDetails := updatedRun.RunDetails.TaskDetails + actualTaskDetails := updatedRun.Tasks logger.Log("Updated pipeline run details") expectedTaskDetails := GetTasksFromWorkflow(compiledWorkflow) - if *updatedRun.State == run_model.V2beta1RuntimeStateRUNNING { - logger.Log("Pipeline run did not finish, checking workflow controller logs") - podLog := testutil.ReadContainerLogs(k8Client, *config.Namespace, "workflow-controller", nil, &testContext.TestStartTimeUTC, config.PodLogLimit) - logger.Log("Attaching Workflow Controller logs to the report") - ginkgo.AddReportEntry("Workflow Controller Logs", podLog) - ginkgo.Fail("Pipeline run did not complete, it stayed in RUNNING state") - + if *updatedRun.State != run_model.V2beta1RuntimeStateSUCCEEDED { + logger.Log("Looks like the run %s FAILED, so capture pod logs for the failed task", runID) + CapturePodLogsForUnsuccessfulTasks(k8Client, testContext, actualTaskDetails) + ginkgo.Fail("Failing test because the pipeline run was not SUCCESSFUL") } else { - if *updatedRun.State != run_model.V2beta1RuntimeStateSUCCEEDED { - logger.Log("Looks like the run %s FAILED, so capture pod logs for the failed task", runID) - CapturePodLogsForUnsuccessfulTasks(k8Client, testContext, actualTaskDetails) - ginkgo.Fail("Failing test because the pipeline run was not SUCCESSFUL") - } else { - logger.Log("Pipeline run succeeded, checking if the number of tasks are what is expected") - 
gomega.Expect(len(actualTaskDetails)).To(gomega.BeNumerically(">=", len(expectedTaskDetails)), "Number of created DAG tasks should be >= number of expected tasks") - } + logger.Log("Pipeline run succeeded, checking if the number of tasks are what is expected") + gomega.Expect(len(actualTaskDetails)).To(gomega.BeNumerically(">=", len(expectedTaskDetails)), "Number of created DAG tasks should be >= number of expected tasks") } } @@ -102,42 +95,42 @@ func CapturePodLogsForUnsuccessfulTasks(k8Client *kubernetes.Clientset, testCont for _, task := range taskDetails { if task.State != nil { switch *task.State { - case run_model.V2beta1RuntimeStateSUCCEEDED: + case run_model.PipelineTaskDetailTaskStateSUCCEEDED: { - logger.Log("SUCCEEDED - Task %s for run %s has finished successfully", task.DisplayName, task.RunID) + logger.Log("SUCCEEDED - Task %s for run %s has finished successfully", task.Name, task.RunID) } - case run_model.V2beta1RuntimeStateRUNNING: + case run_model.PipelineTaskDetailTaskStateRUNNING: { - logger.Log("RUNNING - Task %s for Run %s is running", task.DisplayName, task.RunID) + logger.Log("RUNNING - Task %s for Run %s is running", task.Name, task.RunID) } - case run_model.V2beta1RuntimeStateSKIPPED: + case run_model.PipelineTaskDetailTaskStateSKIPPED: { - logger.Log("SKIPPED - Task %s for Run %s skipped", task.DisplayName, task.RunID) + logger.Log("SKIPPED - Task %s for Run %s skipped", task.Name, task.RunID) } - case run_model.V2beta1RuntimeStateCANCELED: + case run_model.PipelineTaskDetailTaskStateCACHED: { - logger.Log("CANCELED - Task %s for Run %s canceled", task.DisplayName, task.RunID) + logger.Log("CACHED - Task %s for Run %s cached", task.Name, task.RunID) } - case run_model.V2beta1RuntimeStateFAILED: + case run_model.PipelineTaskDetailTaskStateFAILED: { - logger.Log("%s - Task %s for Run %s did not complete successfully", *task.State, task.DisplayName, task.RunID) - for _, childTask := range task.ChildTasks { - podName := childTask.PodName + logger.Log("%s - Task %s for Run %s did not complete successfully", *task.State, task.Name, task.RunID) + for _, pod := range task.Pods { + podName := pod.Name if podName != "" { - logger.Log("Capturing pod logs for task %s, with pod name %s", task.DisplayName, podName) + logger.Log("Capturing pod logs for task %s, with pod name %s", task.Name, podName) podLog := testutil.ReadPodLogs(k8Client, *config.Namespace, podName, nil, &testContext.TestStartTimeUTC, config.PodLogLimit) - logger.Log("Pod logs captured for task %s in pod %s", task.DisplayName, podName) + logger.Log("Pod logs captured for task %s in pod %s", task.Name, podName) logger.Log("Attaching pod logs to the report") - ginkgo.AddReportEntry(fmt.Sprintf("Failing '%s' Component Log", task.DisplayName), podLog) + ginkgo.AddReportEntry(fmt.Sprintf("Failing '%s' Component Log", task.Name), podLog) logger.Log("Attached pod logs to the report") } } - failedTasks[task.DisplayName] = string(*task.State) + failedTasks[task.Name] = string(*task.State) } default: { - logger.Log("UNKNOWN state - Task %s for Run %s has an UNKNOWN state", task.DisplayName, task.RunID) + logger.Log("UNKNOWN state - Task %s for Run %s has an UNKNOWN state", task.Name, task.RunID) } } } diff --git a/backend/test/proto_tests/README.md b/backend/test/proto_tests/README.md index cd0e97829f1..f5e4e4399e0 100644 --- a/backend/test/proto_tests/README.md +++ b/backend/test/proto_tests/README.md @@ -26,4 +26,4 @@ export UPDATE_EXPECTED=false go test . 
# verify your changes ``` -Note that it is very unlikely you should need to update this code, if you do then it is a good sign you are introducing breaking changes, so use it wisely. +Take care when updating these files, as they are used to verify backwards compatibility. diff --git a/backend/test/proto_tests/proto_test.go b/backend/test/proto_tests/proto_test.go index 469a084fdf2..56a893636cd 100644 --- a/backend/test/proto_tests/proto_test.go +++ b/backend/test/proto_tests/proto_test.go @@ -15,7 +15,6 @@ package proto_tests import ( - "fmt" "path/filepath" "testing" @@ -23,12 +22,8 @@ import ( pb "github.com/kubeflow/pipelines/backend/api/v2beta1/go_client" ) -// This is the commit that contains the proto generated files -// that were used to generate the test data. -const commit = "1791485" - func generatePath(path string) string { - return filepath.Join(fmt.Sprintf("generated-%s", commit), path) + return filepath.Join("generated", path) } func TestRuns(t *testing.T) { diff --git a/backend/test/proto_tests/testdata/generated-1791485/experiment.json b/backend/test/proto_tests/testdata/generated-1791485/experiment.json deleted file mode 100644 index ec8ef016ad5..00000000000 --- a/backend/test/proto_tests/testdata/generated-1791485/experiment.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "experiment_id": "exp-456", - "display_name": "Production Data Processing Experiment", - "description": "Experiment for testing production data processing pipeline", - "created_at": "2024-01-01T12:00:00Z", - "namespace": "namespace1", - "storage_state": "AVAILABLE", - "last_run_created_at": "2024-01-01T12:00:00Z" -} \ No newline at end of file diff --git a/backend/test/proto_tests/testdata/generated-1791485/pipeline_spec.json b/backend/test/proto_tests/testdata/generated-1791485/pipeline_spec.json deleted file mode 100644 index 3333c7b0b59..00000000000 --- a/backend/test/proto_tests/testdata/generated-1791485/pipeline_spec.json +++ /dev/null @@ -1,115 +0,0 @@ -{ - "pipeline_info": { - "name": "sample-pipeline", - "display_name": "", - "description": "Sample pipeline for testing" - }, - "deployment_spec": {}, - "sdk_version": "", - "schema_version": "", - "components": { - "comp-1": { - "input_definitions": { - "artifacts": {}, - "parameters": { - "param1": { - "type": "PRIMITIVE_TYPE_UNSPECIFIED", - "parameter_type": "STRING", - "default_value": null, - "is_optional": false, - "description": "" - } - } - }, - "output_definitions": { - "artifacts": {}, - "parameters": { - "output1": { - "type": "PRIMITIVE_TYPE_UNSPECIFIED", - "parameter_type": "STRING", - "description": "" - } - } - }, - "dag": { - "tasks": { - "task1": { - "task_info": { - "name": "task1" - }, - "inputs": { - "parameters": { - "param1": { - "component_input_parameter": "param1", - "parameter_expression_selector": "" - } - }, - "artifacts": {} - }, - "dependent_tasks": [], - "caching_options": null, - "component_ref": { - "name": "comp-1" - }, - "trigger_policy": null, - "retry_policy": null, - "iterator_policy": null - } - }, - "outputs": { - "artifacts": {}, - "parameters": { - "output1": { - "value_from_parameter": { - "producer_subtask": "foo", - "output_parameter_key": "bar" - } - } - } - } - }, - "single_platform_specs": [], - "task_config_passthroughs": [] - } - }, - "root": { - "input_definitions": { - "artifacts": {}, - "parameters": { - "input1": { - "type": "PRIMITIVE_TYPE_UNSPECIFIED", - "parameter_type": "STRING", - "default_value": "foo", - "is_optional": false, - "description": "" - }, - "input2": { - "type": 
"PRIMITIVE_TYPE_UNSPECIFIED", - "parameter_type": "STRING", - "default_value": null, - "is_optional": false, - "description": "" - } - } - }, - "output_definitions": { - "artifacts": {}, - "parameters": { - "output1": { - "type": "PRIMITIVE_TYPE_UNSPECIFIED", - "parameter_type": "STRING", - "description": "" - }, - "output2": { - "type": "PRIMITIVE_TYPE_UNSPECIFIED", - "parameter_type": "NUMBER_INTEGER", - "description": "" - } - } - }, - "executor_label": "root-executor", - "single_platform_specs": [], - "task_config_passthroughs": [] - }, - "default_pipeline_root": "" -} \ No newline at end of file diff --git a/backend/test/proto_tests/testdata/generated-1791485/recurring_run.json b/backend/test/proto_tests/testdata/generated-1791485/recurring_run.json deleted file mode 100644 index b5ac3cbdb00..00000000000 --- a/backend/test/proto_tests/testdata/generated-1791485/recurring_run.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "recurring_run_id": "recurring-run-789", - "display_name": "Daily Data Processing", - "description": "Scheduled pipeline for daily data processing tasks", - "pipeline_version_reference": { - "pipeline_id": "9b187b86-7c0a-42ae-a0bc-2a746b6eb7a3", - "pipeline_version_id": "e15dc3ec-b45e-4cc7-bb07-e76b5dbce99a" - }, - "runtime_config": { - "parameters": { - "batch_size": 500, - "processing_date": "${system.date}" - }, - "pipeline_root": "" - }, - "service_account": "sa3", - "max_concurrency": "0", - "trigger": { - "periodic_schedule": { - "start_time": "2024-01-01T12:00:00Z", - "end_time": null, - "interval_second": "86400" - } - }, - "mode": "ENABLE", - "created_at": "2024-01-01T12:00:00Z", - "updated_at": "2024-01-01T12:00:00Z", - "status": "ENABLED", - "error": null, - "no_catchup": false, - "namespace": "namespace1", - "experiment_id": "" -} \ No newline at end of file diff --git a/backend/test/proto_tests/testdata/generated-1791485/run_completed.json b/backend/test/proto_tests/testdata/generated-1791485/run_completed.json deleted file mode 100644 index 1eb3d07da91..00000000000 --- a/backend/test/proto_tests/testdata/generated-1791485/run_completed.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "experiment_id": "exp-456", - "run_id": "completed-run-123", - "display_name": "Production Pipeline Run", - "storage_state": "AVAILABLE", - "description": "Production pipeline execution for data processing", - "pipeline_version_reference": { - "pipeline_id": "9b187b86-7c0a-42ae-a0bc-2a746b6eb7a3", - "pipeline_version_id": "e15dc3ec-b45e-4cc7-bb07-e76b5dbce99a" - }, - "runtime_config": { - "parameters": { - "batch_size": 1000, - "learning_rate": "foo" - }, - "pipeline_root": "" - }, - "service_account": "sa1", - "created_at": "2024-01-01T12:00:00Z", - "scheduled_at": "2024-01-01T12:00:00Z", - "finished_at": "2024-01-01T12:00:00Z", - "state": "SUCCEEDED", - "error": null, - "run_details": null, - "recurring_run_id": "recurring-schedule-001", - "state_history": [] -} \ No newline at end of file diff --git a/backend/test/proto_tests/testdata/generated-1791485/run_completed_with_spec.json b/backend/test/proto_tests/testdata/generated-1791485/run_completed_with_spec.json deleted file mode 100644 index 331453529d3..00000000000 --- a/backend/test/proto_tests/testdata/generated-1791485/run_completed_with_spec.json +++ /dev/null @@ -1,74 +0,0 @@ -{ - "experiment_id": "exp-456", - "run_id": "completed-run-123", - "display_name": "Production Pipeline Run", - "storage_state": "AVAILABLE", - "description": "Production pipeline execution for data processing", - "pipeline_spec": { - "components": { - 
"comp-hello-world": { - "executorLabel": "exec-hello-world" - } - }, - "deploymentSpec": { - "executors": { - "exec-hello-world": { - "container": { - "args": [ - "--executor_input", - "{{$}}", - "--function_to_execute", - "hello_world" - ], - "command": [ - "sh", - "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.0' '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"$0\" \"$@\"\n", - "sh", - "-ec", - "program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef hello_world():\n print(\"hello world\")\n\n" - ], - "image": "python:3.11" - } - } - } - }, - "pipelineInfo": { - "name": "pipeline-hello-world" - }, - "root": { - "dag": { - "tasks": { - "hello-world": { - "cachingOptions": {}, - "componentRef": { - "name": "comp-hello-world" - }, - "taskInfo": { - "name": "hello-world" - } - } - } - } - }, - "schemaVersion": "2.1.0", - "sdkVersion": "kfp-2.14.0" - }, - "runtime_config": { - "parameters": { - "batch_size": 1000, - "learning_rate": "foo" - }, - "pipeline_root": "" - }, - "service_account": "sa1", - "created_at": "2024-01-01T12:00:00Z", - "scheduled_at": "2024-01-01T12:00:00Z", - "finished_at": "2024-01-01T12:00:00Z", - "state": "SUCCEEDED", - "error": null, - "run_details": null, - "recurring_run_id": "recurring-schedule-001", - "state_history": [] -} \ No newline at end of file diff --git a/backend/test/proto_tests/testdata/generated-1791485/run_failed.json b/backend/test/proto_tests/testdata/generated-1791485/run_failed.json deleted file mode 100644 index 5dae9160864..00000000000 --- a/backend/test/proto_tests/testdata/generated-1791485/run_failed.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "experiment_id": "exp-789", - "run_id": "failed-run-456", - "display_name": "Data Processing Pipeline", - "storage_state": "AVAILABLE", - "description": "Failed attempt to process customer data", - "runtime_config": null, - "service_account": "sa2", - "created_at": "2024-01-01T12:00:00Z", - "scheduled_at": "2024-01-01T12:00:00Z", - "finished_at": "2024-01-01T12:00:00Z", - "state": "FAILED", - "error": { - "code": 1, - "message": "This was a Failed Run.", - "details": [] - }, - "run_details": null, - "recurring_run_id": "", - "state_history": [] -} \ No newline at end of file diff --git a/backend/test/proto_tests/testdata/generated-1791485/visualization.json b/backend/test/proto_tests/testdata/generated-1791485/visualization.json deleted file mode 100644 index 4b3a1c2f328..00000000000 --- a/backend/test/proto_tests/testdata/generated-1791485/visualization.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "type": "ROC_CURVE", - "source": "gs://my-bucket/data/visualization.csv", - "arguments": "{\"param1\": \"value1\", \"param2\": \"value2\"}", - "html": "
Generated Visualization
", - "error": "" -} \ No newline at end of file diff --git a/backend/test/proto_tests/testdata/generated/experiment.json b/backend/test/proto_tests/testdata/generated/experiment.json new file mode 100644 index 00000000000..ddc34c116f2 --- /dev/null +++ b/backend/test/proto_tests/testdata/generated/experiment.json @@ -0,0 +1,9 @@ +{ + "experiment_id": "exp-456", + "display_name": "Production Data Processing Experiment", + "description": "Experiment for testing production data processing pipeline", + "created_at": "2024-01-01T12:00:00Z", + "namespace": "namespace1", + "storage_state": "AVAILABLE", + "last_run_created_at": "2024-01-01T12:00:00Z" +} \ No newline at end of file diff --git a/backend/test/proto_tests/testdata/generated-1791485/experiment.pb b/backend/test/proto_tests/testdata/generated/experiment.pb similarity index 100% rename from backend/test/proto_tests/testdata/generated-1791485/experiment.pb rename to backend/test/proto_tests/testdata/generated/experiment.pb diff --git a/backend/test/proto_tests/testdata/generated-1791485/pipeline.json b/backend/test/proto_tests/testdata/generated/pipeline.json similarity index 100% rename from backend/test/proto_tests/testdata/generated-1791485/pipeline.json rename to backend/test/proto_tests/testdata/generated/pipeline.json diff --git a/backend/test/proto_tests/testdata/generated-1791485/pipeline.pb b/backend/test/proto_tests/testdata/generated/pipeline.pb similarity index 100% rename from backend/test/proto_tests/testdata/generated-1791485/pipeline.pb rename to backend/test/proto_tests/testdata/generated/pipeline.pb diff --git a/backend/test/proto_tests/testdata/generated/pipeline_spec.json b/backend/test/proto_tests/testdata/generated/pipeline_spec.json new file mode 100644 index 00000000000..8ac278bd715 --- /dev/null +++ b/backend/test/proto_tests/testdata/generated/pipeline_spec.json @@ -0,0 +1,115 @@ +{ + "pipeline_info": { + "name": "sample-pipeline", + "display_name": "", + "description": "Sample pipeline for testing" + }, + "deployment_spec": {}, + "sdk_version": "", + "schema_version": "", + "components": { + "comp-1": { + "input_definitions": { + "artifacts": {}, + "parameters": { + "param1": { + "type": "PRIMITIVE_TYPE_UNSPECIFIED", + "parameter_type": "STRING", + "default_value": null, + "is_optional": false, + "description": "" + } + } + }, + "output_definitions": { + "artifacts": {}, + "parameters": { + "output1": { + "type": "PRIMITIVE_TYPE_UNSPECIFIED", + "parameter_type": "STRING", + "description": "" + } + } + }, + "dag": { + "tasks": { + "task1": { + "task_info": { + "name": "task1" + }, + "inputs": { + "parameters": { + "param1": { + "component_input_parameter": "param1", + "parameter_expression_selector": "" + } + }, + "artifacts": {} + }, + "dependent_tasks": [], + "caching_options": null, + "component_ref": { + "name": "comp-1" + }, + "trigger_policy": null, + "retry_policy": null, + "iterator_policy": null + } + }, + "outputs": { + "artifacts": {}, + "parameters": { + "output1": { + "value_from_parameter": { + "producer_subtask": "foo", + "output_parameter_key": "bar" + } + } + } + } + }, + "single_platform_specs": [], + "task_config_passthroughs": [] + } + }, + "root": { + "input_definitions": { + "artifacts": {}, + "parameters": { + "input1": { + "type": "PRIMITIVE_TYPE_UNSPECIFIED", + "parameter_type": "STRING", + "default_value": "foo", + "is_optional": false, + "description": "" + }, + "input2": { + "type": "PRIMITIVE_TYPE_UNSPECIFIED", + "parameter_type": "STRING", + "default_value": null, + "is_optional": 
false, + "description": "" + } + } + }, + "output_definitions": { + "artifacts": {}, + "parameters": { + "output1": { + "type": "PRIMITIVE_TYPE_UNSPECIFIED", + "parameter_type": "STRING", + "description": "" + }, + "output2": { + "type": "PRIMITIVE_TYPE_UNSPECIFIED", + "parameter_type": "NUMBER_INTEGER", + "description": "" + } + } + }, + "executor_label": "root-executor", + "single_platform_specs": [], + "task_config_passthroughs": [] + }, + "default_pipeline_root": "" +} \ No newline at end of file diff --git a/backend/test/proto_tests/testdata/generated-1791485/pipeline_spec.pb b/backend/test/proto_tests/testdata/generated/pipeline_spec.pb similarity index 83% rename from backend/test/proto_tests/testdata/generated-1791485/pipeline_spec.pb rename to backend/test/proto_tests/testdata/generated/pipeline_spec.pb index 4f4f8023f05..9e0d394e65b 100644 Binary files a/backend/test/proto_tests/testdata/generated-1791485/pipeline_spec.pb and b/backend/test/proto_tests/testdata/generated/pipeline_spec.pb differ diff --git a/backend/test/proto_tests/testdata/generated-1791485/pipeline_version.json b/backend/test/proto_tests/testdata/generated/pipeline_version.json similarity index 100% rename from backend/test/proto_tests/testdata/generated-1791485/pipeline_version.json rename to backend/test/proto_tests/testdata/generated/pipeline_version.json diff --git a/backend/test/proto_tests/testdata/generated-1791485/pipeline_version.pb b/backend/test/proto_tests/testdata/generated/pipeline_version.pb similarity index 90% rename from backend/test/proto_tests/testdata/generated-1791485/pipeline_version.pb rename to backend/test/proto_tests/testdata/generated/pipeline_version.pb index 7ffd67ca12b..1550e8778d8 100644 Binary files a/backend/test/proto_tests/testdata/generated-1791485/pipeline_version.pb and b/backend/test/proto_tests/testdata/generated/pipeline_version.pb differ diff --git a/backend/test/proto_tests/testdata/generated-1791485/platform_spec.json b/backend/test/proto_tests/testdata/generated/platform_spec.json similarity index 100% rename from backend/test/proto_tests/testdata/generated-1791485/platform_spec.json rename to backend/test/proto_tests/testdata/generated/platform_spec.json diff --git a/backend/test/proto_tests/testdata/generated-1791485/platform_spec.pb b/backend/test/proto_tests/testdata/generated/platform_spec.pb similarity index 100% rename from backend/test/proto_tests/testdata/generated-1791485/platform_spec.pb rename to backend/test/proto_tests/testdata/generated/platform_spec.pb diff --git a/backend/test/proto_tests/testdata/generated/recurring_run.json b/backend/test/proto_tests/testdata/generated/recurring_run.json new file mode 100644 index 00000000000..f49c2acb765 --- /dev/null +++ b/backend/test/proto_tests/testdata/generated/recurring_run.json @@ -0,0 +1,33 @@ +{ + "recurring_run_id": "recurring-run-789", + "display_name": "Daily Data Processing", + "description": "Scheduled pipeline for daily data processing tasks", + "pipeline_version_reference": { + "pipeline_id": "9b187b86-7c0a-42ae-a0bc-2a746b6eb7a3", + "pipeline_version_id": "e15dc3ec-b45e-4cc7-bb07-e76b5dbce99a" + }, + "runtime_config": { + "parameters": { + "batch_size": 500, + "processing_date": "${system.date}" + }, + "pipeline_root": "" + }, + "service_account": "sa3", + "max_concurrency": "0", + "trigger": { + "periodic_schedule": { + "start_time": "2024-01-01T12:00:00Z", + "end_time": null, + "interval_second": "86400" + } + }, + "mode": "ENABLE", + "created_at": "2024-01-01T12:00:00Z", + "updated_at": 
"2024-01-01T12:00:00Z", + "status": "ENABLED", + "error": null, + "no_catchup": false, + "namespace": "namespace1", + "experiment_id": "" +} \ No newline at end of file diff --git a/backend/test/proto_tests/testdata/generated-1791485/recurring_run.pb b/backend/test/proto_tests/testdata/generated/recurring_run.pb similarity index 100% rename from backend/test/proto_tests/testdata/generated-1791485/recurring_run.pb rename to backend/test/proto_tests/testdata/generated/recurring_run.pb diff --git a/backend/test/proto_tests/testdata/generated/run_completed.json b/backend/test/proto_tests/testdata/generated/run_completed.json new file mode 100644 index 00000000000..c309431df4d --- /dev/null +++ b/backend/test/proto_tests/testdata/generated/run_completed.json @@ -0,0 +1,30 @@ +{ + "experiment_id": "exp-456", + "run_id": "completed-run-123", + "display_name": "Production Pipeline Run", + "storage_state": "AVAILABLE", + "description": "Production pipeline execution for data processing", + "pipeline_version_reference": { + "pipeline_id": "9b187b86-7c0a-42ae-a0bc-2a746b6eb7a3", + "pipeline_version_id": "e15dc3ec-b45e-4cc7-bb07-e76b5dbce99a" + }, + "runtime_config": { + "parameters": { + "batch_size": 1000, + "learning_rate": "foo" + }, + "pipeline_root": "" + }, + "service_account": "sa1", + "created_at": "2024-01-01T12:00:00Z", + "scheduled_at": "2024-01-01T12:00:00Z", + "finished_at": "2024-01-01T12:00:00Z", + "state": "SUCCEEDED", + "error": null, + "run_details": null, + "recurring_run_id": "recurring-schedule-001", + "state_history": [], + "pipeline_reference": null, + "task_count": 0, + "tasks": [] +} \ No newline at end of file diff --git a/backend/test/proto_tests/testdata/generated-1791485/run_completed.pb b/backend/test/proto_tests/testdata/generated/run_completed.pb similarity index 100% rename from backend/test/proto_tests/testdata/generated-1791485/run_completed.pb rename to backend/test/proto_tests/testdata/generated/run_completed.pb diff --git a/backend/test/proto_tests/testdata/generated/run_completed_with_spec.json b/backend/test/proto_tests/testdata/generated/run_completed_with_spec.json new file mode 100644 index 00000000000..770d06c8307 --- /dev/null +++ b/backend/test/proto_tests/testdata/generated/run_completed_with_spec.json @@ -0,0 +1,77 @@ +{ + "experiment_id": "exp-456", + "run_id": "completed-run-123", + "display_name": "Production Pipeline Run", + "storage_state": "AVAILABLE", + "description": "Production pipeline execution for data processing", + "pipeline_spec": { + "components": { + "comp-hello-world": { + "executorLabel": "exec-hello-world" + } + }, + "deploymentSpec": { + "executors": { + "exec-hello-world": { + "container": { + "args": [ + "--executor_input", + "{{$}}", + "--function_to_execute", + "hello_world" + ], + "command": [ + "sh", + "-c", + "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==2.14.0' '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' && \"$0\" \"$@\"\n", + "sh", + "-ec", + "program_path=$(mktemp -d)\n\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\n_KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", + "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef hello_world():\n print(\"hello world\")\n\n" + ], + "image": "python:3.11" + } + } + } + }, + "pipelineInfo": { + "name": "pipeline-hello-world" + }, + "root": { + "dag": { + "tasks": { + "hello-world": { + "cachingOptions": {}, + "componentRef": { + "name": "comp-hello-world" + }, + "taskInfo": { + "name": "hello-world" + } + } + } + } + }, + "schemaVersion": "2.1.0", + "sdkVersion": "kfp-2.14.0" + }, + "runtime_config": { + "parameters": { + "batch_size": 1000, + "learning_rate": "foo" + }, + "pipeline_root": "" + }, + "service_account": "sa1", + "created_at": "2024-01-01T12:00:00Z", + "scheduled_at": "2024-01-01T12:00:00Z", + "finished_at": "2024-01-01T12:00:00Z", + "state": "SUCCEEDED", + "error": null, + "run_details": null, + "recurring_run_id": "recurring-schedule-001", + "state_history": [], + "pipeline_reference": null, + "task_count": 0, + "tasks": [] +} \ No newline at end of file diff --git a/backend/test/proto_tests/testdata/generated-1791485/run_completed_with_spec.pb b/backend/test/proto_tests/testdata/generated/run_completed_with_spec.pb similarity index 96% rename from backend/test/proto_tests/testdata/generated-1791485/run_completed_with_spec.pb rename to backend/test/proto_tests/testdata/generated/run_completed_with_spec.pb index 9221f57eb2f..9a3dd030848 100644 Binary files a/backend/test/proto_tests/testdata/generated-1791485/run_completed_with_spec.pb and b/backend/test/proto_tests/testdata/generated/run_completed_with_spec.pb differ diff --git a/backend/test/proto_tests/testdata/generated/run_failed.json b/backend/test/proto_tests/testdata/generated/run_failed.json new file mode 100644 index 00000000000..78a62a8c8ff --- /dev/null +++ b/backend/test/proto_tests/testdata/generated/run_failed.json @@ -0,0 +1,24 @@ +{ + "experiment_id": "exp-789", + "run_id": "failed-run-456", + "display_name": "Data Processing Pipeline", + "storage_state": "AVAILABLE", + "description": "Failed attempt to process customer data", + "runtime_config": null, + "service_account": "sa2", + "created_at": "2024-01-01T12:00:00Z", + "scheduled_at": "2024-01-01T12:00:00Z", + "finished_at": "2024-01-01T12:00:00Z", + "state": "FAILED", + "error": { + "code": 1, + "message": "This was a Failed Run.", + "details": [] + }, + "run_details": null, + "recurring_run_id": "", + "state_history": [], + "pipeline_reference": null, + "task_count": 0, + "tasks": [] +} \ No newline at end of file diff --git a/backend/test/proto_tests/testdata/generated-1791485/run_failed.pb b/backend/test/proto_tests/testdata/generated/run_failed.pb similarity index 100% rename from backend/test/proto_tests/testdata/generated-1791485/run_failed.pb rename to backend/test/proto_tests/testdata/generated/run_failed.pb diff --git a/backend/test/proto_tests/testdata/generated/visualization.json b/backend/test/proto_tests/testdata/generated/visualization.json new file mode 100644 index 
00000000000..8e7d0fac179 --- /dev/null +++ b/backend/test/proto_tests/testdata/generated/visualization.json @@ -0,0 +1,7 @@ +{ + "type": "ROC_CURVE", + "source": "gs://my-bucket/data/visualization.csv", + "arguments": "{\"param1\": \"value1\", \"param2\": \"value2\"}", + "html": "
Generated Visualization
", + "error": "" +} \ No newline at end of file diff --git a/backend/test/proto_tests/testdata/generated-1791485/visualization.pb b/backend/test/proto_tests/testdata/generated/visualization.pb similarity index 100% rename from backend/test/proto_tests/testdata/generated-1791485/visualization.pb rename to backend/test/proto_tests/testdata/generated/visualization.pb diff --git a/backend/test/testutil/pipeline_run_utils.go b/backend/test/testutil/pipeline_run_utils.go index f9e921bf2be..456390b19dc 100644 --- a/backend/test/testutil/pipeline_run_utils.go +++ b/backend/test/testutil/pipeline_run_utils.go @@ -78,6 +78,7 @@ func GetPipelineRun(runClient *api_server.RunClient, pipelineRunID *string) *run logger.Log("Get a pipeline run with id=%s", *pipelineRunID) pipelineRun, runError := runClient.Get(&run_params.RunServiceGetRunParams{ RunID: *pipelineRunID, + View: strPTR("FULL"), }) gomega.Expect(runError).NotTo(gomega.HaveOccurred(), "Failed to get run with id="+*pipelineRunID) return pipelineRun @@ -153,3 +154,7 @@ func GetPipelineRunTimeInputs(pipelineSpecFile string) map[string]interface{} { logger.Log("Returning pipeline run time inputs %v", pipelineInputMap) return pipelineInputMap } + +func strPTR(s string) *string { + return &s +} diff --git a/backend/test/v2/integration/cache_test.go b/backend/test/v2/integration/cache_test.go index 9893fafc6bf..7bad0a6275d 100644 --- a/backend/test/v2/integration/cache_test.go +++ b/backend/test/v2/integration/cache_test.go @@ -15,11 +15,8 @@ import ( "github.com/kubeflow/pipelines/backend/api/v2beta1/go_http_client/run_model" apiServer "github.com/kubeflow/pipelines/backend/src/common/client/api_server/v2" "github.com/kubeflow/pipelines/backend/src/common/util" - "github.com/kubeflow/pipelines/backend/src/v2/metadata" - "github.com/kubeflow/pipelines/backend/src/v2/metadata/testutils" "github.com/kubeflow/pipelines/backend/test/config" "github.com/kubeflow/pipelines/backend/test/v2" - pb "github.com/kubeflow/pipelines/third_party/ml-metadata/go/ml_metadata" "github.com/golang/glog" "github.com/stretchr/testify/require" @@ -38,20 +35,12 @@ type CacheTestSuite struct { pipelineUploadClient apiServer.PipelineUploadInterface runClient *apiServer.RunClient recurringRunClient *apiServer.RecurringRunClient - mlmdClient pb.MetadataStoreServiceClient } func TestCache(t *testing.T) { suite.Run(t, new(CacheTestSuite)) } -func (s *CacheTestSuite) SetupSuite() { - var err error - s.mlmdClient, err = testutils.NewTestMlmdClient("127.0.0.1", metadata.DefaultConfig().Port, *config.TLSEnabled, *config.CaCertPath) - require.NoError(s.T(), err) - require.NotNil(s.T(), s.mlmdClient) -} - func (s *CacheTestSuite) SetupTest() { if !*runIntegrationTests { s.T().SkipNow() @@ -180,11 +169,13 @@ func (s *CacheTestSuite) TestCacheRecurringRun() { return false }, 4*time.Minute, 5*time.Second) - state := s.getContainerExecutionState(t, allRuns[1].RunID) + task := s.getTask(t, allRuns[1].RunID, "comp") if *cacheEnabled { - require.Equal(t, pb.Execution_CACHED, state) + require.Equal(t, run_model.PipelineTaskDetailTaskStateCACHED, *task.State) + // Verify no executor pod exists for cached task + s.verifyNoExecutorPod(t, task) } else { - require.Equal(t, pb.Execution_COMPLETE, state) + require.Equal(t, run_model.PipelineTaskDetailTaskStateSUCCEEDED, *task.State) } } @@ -193,19 +184,21 @@ func (s *CacheTestSuite) TestCacheSingleRun() { pipelineVersion := s.preparePipeline() - pipelineRunDetail, err := s.createRun(pipelineVersion) + _, err := s.createRun(pipelineVersion) 
require.NoError(t, err) // Create the second run - pipelineRunDetail, err = s.createRun(pipelineVersion) + pipelineRunDetail, err := s.createRun(pipelineVersion) require.NoError(t, err) require.NotNil(t, pipelineRunDetail) - state := s.getContainerExecutionState(t, pipelineRunDetail.RunID) + task := s.getTask(t, pipelineRunDetail.RunID, "comp") if *cacheEnabled { - require.Equal(t, pb.Execution_CACHED, state) + require.Equal(t, run_model.PipelineTaskDetailTaskStateCACHED, *task.State) + // Verify no executor pod exists for cached task + s.verifyNoExecutorPod(t, task) } else { - require.Equal(t, pb.Execution_COMPLETE, state) + require.Equal(t, run_model.PipelineTaskDetailTaskStateSUCCEEDED, *task.State) } } @@ -274,8 +267,15 @@ func (s *CacheTestSuite) TestCacheSingleRunWithPVC_SameName_Caches() { require.NoError(t, err) require.NotNil(t, run2) - state := s.getContainerExecutionState(t, run2.RunID) - require.Equal(t, pb.Execution_CACHED, state) + // Check producer task is cached + producerTask := s.getTask(t, run2.RunID, "producer") + require.Equal(t, run_model.PipelineTaskDetailTaskStateCACHED, *producerTask.State) + s.verifyNoExecutorPod(t, producerTask) + + // Check consumer task is also cached + consumerTask := s.getTask(t, run2.RunID, "consumer") + require.Equal(t, run_model.PipelineTaskDetailTaskStateCACHED, *consumerTask.State) + s.verifyNoExecutorPod(t, consumerTask) // Third run with a different PVC name should not hit cache. otherPVCName := fmt.Sprintf("%s-alt", pvcName) @@ -298,9 +298,12 @@ func (s *CacheTestSuite) TestCacheSingleRunWithPVC_SameName_Caches() { require.NoError(t, err) require.NotNil(t, run3) - state = s.getContainerExecutionState(t, run3.RunID) // With a different PVC, do not expect cache hit - require.Equal(t, pb.Execution_COMPLETE, state) + producerTask = s.getTask(t, run3.RunID, "producer") + require.Equal(t, run_model.PipelineTaskDetailTaskStateSUCCEEDED, *producerTask.State) + + consumerTask = s.getTask(t, run3.RunID, "consumer") + require.Equal(t, run_model.PipelineTaskDetailTaskStateSUCCEEDED, *consumerTask.State) } func (s *CacheTestSuite) createRun(pipelineVersion *pipeline_upload_model.V2beta1PipelineVersion) (*run_model.V2beta1Run, error) { @@ -392,30 +395,40 @@ func (s *CacheTestSuite) cleanUp() { test.DeleteAllPipelines(s.pipelineClient, s.T()) } -// getContainerExecutionState fetches the container execution state for a given run ID. -func (s *CacheTestSuite) getContainerExecutionState(t *testing.T, runID string) pb.Execution_State { - contextsFilterQuery := fmt.Sprintf("name = '%s'", runID) - - contexts, err := s.mlmdClient.GetContexts(context.Background(), &pb.GetContextsRequest{ - Options: &pb.ListOperationOptions{ - FilterQuery: &contextsFilterQuery, - }, +// getTask fetches the task details for a given run ID and task name. 
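+// It fetches the run with the FULL view so that run.Tasks is populated;
+// the NotNil assertion below depends on that, as other views may omit task details.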
+func (s *CacheTestSuite) getTask(t *testing.T, runID string, taskName string) *run_model.V2beta1PipelineTaskDetail { + // Get run with FULL view to populate tasks + fullView := string(run_model.V2beta1GetRunRequestViewModeFULL) + run, err := s.runClient.Get(&runParams.RunServiceGetRunParams{ + RunID: runID, + View: &fullView, }) require.NoError(t, err) - require.NotNil(t, contexts) + require.NotNil(t, run) + require.NotNil(t, run.Tasks, "Tasks should be populated with FULL view") + + // Find the task by name + for _, task := range run.Tasks { + if task.Name == taskName { + require.NotNil(t, task.State, "Task state should not be nil") + return task + } + } - executionsByContext, err := s.mlmdClient.GetExecutionsByContext(context.Background(), &pb.GetExecutionsByContextRequest{ - ContextId: contexts.Contexts[0].Id, - }) - require.NoError(t, err) - require.NotNil(t, executionsByContext) - require.NotEmpty(t, executionsByContext.Executions) + t.Fatalf("task %s not found in run %s", taskName, runID) + return nil +} + +// verifyNoExecutorPod verifies that there is no executor pod for a cached task. +// When a task is cached, the driver pod should not create an executor pod. +func (s *CacheTestSuite) verifyNoExecutorPod(t *testing.T, task *run_model.V2beta1PipelineTaskDetail) { + require.NotNil(t, task) - for _, execution := range executionsByContext.Executions { - if metadata.ExecutionType(execution.GetType()) == metadata.ContainerExecutionTypeName { - return execution.GetLastKnownState() + // Check the task's pods field for executor pods + for _, pod := range task.Pods { + if pod.Type != nil && *pod.Type == run_model.PipelineTaskDetailTaskPodTypeEXECUTOR { + t.Fatalf("Found executor pod %s (type=%s) for cached task %s, but cached tasks should not have executor pods", + pod.Name, *pod.Type, task.DisplayName) } } - t.Fatalf("no container execution found for run %s", runID) - return pb.Execution_UNKNOWN } diff --git a/go.mod b/go.mod index ba0d5958360..1e9fabea6f6 100644 --- a/go.mod +++ b/go.mod @@ -22,13 +22,11 @@ require ( github.com/google/go-cmp v0.7.0 github.com/google/uuid v1.6.0 github.com/gorilla/mux v1.8.0 - github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.3.2 // Should match GRPC_GATEWAY_VERSION in backend/api/Dockerfile github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.1 github.com/jackc/pgx/v5 v5.7.5 github.com/kubeflow/pipelines/api v0.0.0-20250102152816-873e9dedd766 github.com/kubeflow/pipelines/kubernetes_platform v0.0.0-20240725205754-d911c8b73b49 - github.com/kubeflow/pipelines/third_party/ml-metadata v0.0.0-20240416215826-da804407ad31 github.com/lestrrat-go/strftime v1.0.4 github.com/mattn/go-sqlite3 v1.14.28 github.com/minio/minio-go/v7 v7.0.94 diff --git a/go.sum b/go.sum index 06f1db74415..df78bb16ae9 100644 --- a/go.sum +++ b/go.sum @@ -43,7 +43,6 @@ github.com/Masterminds/sprig/v3 v3.3.0 h1:mQh0Yrg1XPo6vjYXgtf5OtijNAKJRNcTdOOGZe github.com/Masterminds/sprig/v3 v3.3.0/go.mod h1:Zy1iXRYNqNLUolqCpL4uhk6SHUMAOSCzdgBfDb35Lz0= github.com/Masterminds/squirrel v0.0.0-20190107164353-fa735ea14f09 h1:enWVS77aJkLWVIUExiqF6A8eWTVzCXUKUvkST3/wyKI= github.com/Masterminds/squirrel v0.0.0-20190107164353-fa735ea14f09/go.mod h1:yaPeOnPG5ZRwL9oKdTsO/prlkPbXWZlRVMQ/gGlzIuA= -github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= github.com/PuerkitoBio/purell v1.1.0/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= github.com/PuerkitoBio/urlesc 
v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= @@ -112,18 +111,10 @@ github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QH github.com/cenkalti/backoff/v5 v5.0.2 h1:rIfFVxEf1QsI7E1ZHfp/B4DF/6QBAUhmgkxc0H7Zss8= github.com/cenkalti/backoff/v5 v5.0.2/go.mod h1:rkhZdG3JZukswDf7f0cwqPNk4K0sa+F97BxZthm/crw= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= -github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= -github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= -github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= -github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI= -github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443 h1:aQ3y1lwWyqYPiWZThqv1aFbZMiM9vblcSArJRf2Irls= github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8= github.com/colinmarc/hdfs/v2 v2.4.0 h1:v6R8oBx/Wu9fHpdPoJJjpGSUxo8NhHIwrwsfhFvU9W0= @@ -146,9 +137,6 @@ github.com/emicklei/go-restful/v3 v3.12.2/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRr github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= -github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= -github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= -github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= github.com/envoyproxy/go-control-plane v0.13.4 h1:zEqyPVyku6IvWCFwux4x9RxkLOMUL+1vC9xUFv5l2/M= github.com/envoyproxy/go-control-plane v0.13.4/go.mod h1:kDfuBlDVsSj2MjrLEtRWtHlsWIFcGyB2RMO44Dc5GZA= github.com/envoyproxy/go-control-plane/envoy v1.32.4 h1:jb83lalDRZSpPWW2Z7Mck/8kXZ5CQAFYVjQcdVIr83A= @@ -344,10 +332,7 @@ github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrU github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= github.com/golang/protobuf v1.4.1/go.mod 
h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= -github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= -github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= -github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= @@ -364,7 +349,6 @@ github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= @@ -401,8 +385,6 @@ github.com/gorilla/sessions v1.2.1 h1:DHd3rPN5lE3Ts3D8rKkQ8x/0kqfeNmBAaiSi+o7Fsg github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM= github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674 h1:JeSE6pjso5THxAzdVpqr6/geYxZytqFMBCOtn/ujyeo= github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674/go.mod h1:r4w70xmWCQKmi1ONH4KIaBptdivuRPyosB9RmPlGEwA= -github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.3.2 h1:sGm2vDRFUrQJO/Veii4h4zG2vvqG6uWNkBHSTqXOZk0= -github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.3.2/go.mod h1:wd1YpapPLivG6nQgbf7ZkG1hhSOXDhhn4MLTknx2aAc= github.com/grpc-ecosystem/grpc-gateway v1.16.0 h1:gmcG1KaJ57LophUzW0Hy8NmPhnMZb4M0+kPpLofRdBo= github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.1 h1:X5VWvz21y3gzm9Nw/kaUeku/1+uBhcekkmy4IkffJww= @@ -470,8 +452,6 @@ github.com/kr/pty v1.1.5/go.mod h1:9r2w37qlBe7rQ6e1fg1S/9xpWHSnaqNdHD3WcMdbPDA= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/kubeflow/pipelines/third_party/ml-metadata v0.0.0-20240416215826-da804407ad31 h1:t1G2SexX+SwtYiaFrwH1lzGRSiXYMjd2QDT9842Ytpc= -github.com/kubeflow/pipelines/third_party/ml-metadata v0.0.0-20240416215826-da804407ad31/go.mod h1:gh5+EFvuVywvSOYxqT0N91VKuPtScUke/F66RT0NJ80= github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= github.com/lann/builder v0.0.0-20180802200727-47ae307949d0 h1:SOEGU9fKiNWd/HOJuq6+3iTQz8KNCLtVX6idSoTLdUw= @@ -585,7 +565,6 @@ github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo= github.com/sourcegraph/conc v0.3.0/go.mod 
h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0= -github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= github.com/spf13/afero v1.14.0 h1:9tH6MapGnn/j0eb0yIXiLjERO8RB6xIVZRDCX7PtqWA= github.com/spf13/afero v1.14.0/go.mod h1:acJQ8t0ohCGuMN3O+Pv0V0hgMxNYDlvdk+VTfyZmbYo= github.com/spf13/cast v1.9.2 h1:SsGfm7M8QOFtEzumm7UZrZdLLquNdzFYfIbEXntcFbE= @@ -610,7 +589,6 @@ github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/ github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= -github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= @@ -682,7 +660,6 @@ go.opentelemetry.io/otel/sdk/metric v1.36.0 h1:r0ntwwGosWGaa0CrSt8cuNuTcccMXERFw go.opentelemetry.io/otel/sdk/metric v1.36.0/go.mod h1:qTNOhFDfKRwX0yXOqJYegL5WRaW376QbB7P4Pb0qva4= go.opentelemetry.io/otel/trace v1.36.0 h1:ahxWNuqZjpdiFAyrIoQ4GIiAIhxAunQR6MUoKrsNd4w= go.opentelemetry.io/otel/trace v1.36.0/go.mod h1:gQ+OnDZzrybY4k4seLzPAWNwVBBVlF2szhehOBB/tGA= -go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= go.opentelemetry.io/proto/otlp v1.7.0 h1:jX1VolD6nHuFzOYso2E73H85i92Mv8JQYk0K9vz09os= go.opentelemetry.io/proto/otlp v1.7.0/go.mod h1:fSKjH6YJ7HDlwzltzyMj036AJ3ejJLCgCSHGj4efDDo= go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= @@ -745,9 +722,7 @@ golang.org/x/net v0.0.0-20201202161906-c7110b5ffcbb/go.mod h1:sp8m0HH+o8qH0wwXwY golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= golang.org/x/net v0.0.0-20210421230115-4e50805a0758/go.mod h1:72T/g9IO56b78aLF+1Kcs5dz7/ng1VjMUvfKvpfy+jM= -golang.org/x/net v0.0.0-20211216030914-fe4d6282115f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= @@ -787,12 +762,8 @@ golang.org/x/sys v0.0.0-20190616124812-15dcb6c0061f/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210420072515-93ed5bcd2bfe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys 
v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -867,7 +838,6 @@ google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoA google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= -google.golang.org/genproto v0.0.0-20211221231510-d629cc9a93d5/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20250603155806-513f23925822 h1:rHWScKit0gvAPuOnu87KpaYtjK5zBMLcULh7gxkCXu4= google.golang.org/genproto v0.0.0-20250603155806-513f23925822/go.mod h1:HubltRL7rMh0LfnQPkMH4NPDFEWp0jw3vixw7jEM53s= google.golang.org/genproto/googleapis/api v0.0.0-20250715232539-7130f93afb79 h1:iOye66xuaAK0WnkPuhQPUFy8eJcmwUXqGGP3om6IxX8= @@ -880,9 +850,6 @@ google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQ google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= -google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= -google.golang.org/grpc v1.43.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= google.golang.org/grpc v1.73.0 h1:VIWSmpI2MegBtTuFt5/JWy2oXxtjJ/e89Z70ImfD2ok= google.golang.org/grpc v1.73.0/go.mod h1:50sbHOUqWoCQGI8V2HQLJM0B+LMlIUjNSZmow7EVBQc= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.5.1 h1:F29+wU6Ee6qgu9TddPgooOdaqsxTMunOoj8KA5yuS5A= @@ -896,9 +863,6 @@ google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2 google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= -google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= -google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY= google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= diff --git a/hack/update-all-requirements.sh 
b/hack/update-all-requirements.sh index 0b4cd533ba3..c18acca7a4d 100755 --- a/hack/update-all-requirements.sh +++ b/hack/update-all-requirements.sh @@ -20,5 +20,4 @@ REPO_ROOT="${DIR}/.." cd "${REPO_ROOT}/backend/src/apiserver/visualization" && bash update_requirements.sh cd "${REPO_ROOT}/test/sample-test/hack" && bash update_requirements.sh -cd "${REPO_ROOT}/backend/metadata_writer" && bash update_requirements.sh cd "${REPO_ROOT}/backend" && bash update_requirements.sh diff --git a/manifests/kustomize/base/installs/multi-user/kustomization.yaml b/manifests/kustomize/base/installs/multi-user/kustomization.yaml index 33c4127f0af..6224fb29666 100644 --- a/manifests/kustomize/base/installs/multi-user/kustomization.yaml +++ b/manifests/kustomize/base/installs/multi-user/kustomization.yaml @@ -13,7 +13,6 @@ resources: - viewer-controller - persistence-agent - cache -- metadata-writer - istio-authorization-config.yaml - virtual-service.yaml patches: @@ -23,7 +22,6 @@ patches: - path: scheduled-workflow/deployment-patch.yaml - path: viewer-controller/deployment-patch.yaml - path: persistence-agent/deployment-patch.yaml -- path: metadata-writer/deployment-patch.yaml - path: cache/deployment-patch.yaml configurations: diff --git a/manifests/kustomize/base/installs/multi-user/metadata-writer/cluster-role-binding.yaml b/manifests/kustomize/base/installs/multi-user/metadata-writer/cluster-role-binding.yaml deleted file mode 100644 index 605f1ff0df7..00000000000 --- a/manifests/kustomize/base/installs/multi-user/metadata-writer/cluster-role-binding.yaml +++ /dev/null @@ -1,11 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: ClusterRoleBinding -metadata: - name: kubeflow-pipelines-metadata-writer-binding -roleRef: - apiGroup: rbac.authorization.k8s.io - kind: ClusterRole - name: kubeflow-pipelines-metadata-writer-role -subjects: -- kind: ServiceAccount - name: kubeflow-pipelines-metadata-writer diff --git a/manifests/kustomize/base/installs/multi-user/metadata-writer/cluster-role.yaml b/manifests/kustomize/base/installs/multi-user/metadata-writer/cluster-role.yaml deleted file mode 100644 index a6ec9867253..00000000000 --- a/manifests/kustomize/base/installs/multi-user/metadata-writer/cluster-role.yaml +++ /dev/null @@ -1,31 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: ClusterRole -metadata: - name: kubeflow-pipelines-metadata-writer-role -rules: -- apiGroups: - - "" - resources: - - pods - verbs: - - get - - list - - watch - - update - - patch -- apiGroups: - - "" - resources: - - configmaps - verbs: - - get -- apiGroups: - - argoproj.io - resources: - - workflows - verbs: - - get - - list - - watch - - update - - patch diff --git a/manifests/kustomize/base/installs/multi-user/metadata-writer/deployment-patch.yaml b/manifests/kustomize/base/installs/multi-user/metadata-writer/deployment-patch.yaml deleted file mode 100644 index 2babe9f43fe..00000000000 --- a/manifests/kustomize/base/installs/multi-user/metadata-writer/deployment-patch.yaml +++ /dev/null @@ -1,13 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: metadata-writer -spec: - template: - spec: - containers: - - name: main - env: - - name: NAMESPACE_TO_WATCH - value: '' - valueFrom: null diff --git a/manifests/kustomize/base/installs/multi-user/metadata-writer/kustomization.yaml b/manifests/kustomize/base/installs/multi-user/metadata-writer/kustomization.yaml deleted file mode 100644 index b1f65469e1d..00000000000 --- a/manifests/kustomize/base/installs/multi-user/metadata-writer/kustomization.yaml +++ 
/dev/null @@ -1,5 +0,0 @@ -apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization -resources: -- cluster-role.yaml -- cluster-role-binding.yaml diff --git a/manifests/kustomize/base/installs/multi-user/view-edit-cluster-roles.yaml b/manifests/kustomize/base/installs/multi-user/view-edit-cluster-roles.yaml index 8c86b7971ae..4c4e9061b26 100644 --- a/manifests/kustomize/base/installs/multi-user/view-edit-cluster-roles.yaml +++ b/manifests/kustomize/base/installs/multi-user/view-edit-cluster-roles.yaml @@ -63,6 +63,9 @@ rules: resources: - runs verbs: + - get + - list + - update - archive - create - delete @@ -98,6 +101,14 @@ rules: - workfloweventbindings - workflowtemplates - workflowtaskresults +- apiGroups: + - pipelines.kubeflow.org + resources: + - artifacts + verbs: + - get + - list + - create --- diff --git a/manifests/kustomize/base/metadata/base/kustomization.yaml b/manifests/kustomize/base/metadata/base/kustomization.yaml deleted file mode 100644 index 049b233792e..00000000000 --- a/manifests/kustomize/base/metadata/base/kustomization.yaml +++ /dev/null @@ -1,14 +0,0 @@ -apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization -namespace: kubeflow -resources: - - metadata-grpc-configmap.yaml - - metadata-grpc-deployment.yaml - - metadata-grpc-service.yaml - - metadata-envoy-configmap.yaml - - metadata-envoy-deployment.yaml - - metadata-envoy-service.yaml - - metadata-grpc-sa.yaml -images: - - name: ghcr.io/kubeflow/kfp-metadata-envoy - newTag: 2.14.3 diff --git a/manifests/kustomize/base/metadata/base/metadata-envoy-configmap.yaml b/manifests/kustomize/base/metadata/base/metadata-envoy-configmap.yaml deleted file mode 100644 index cde88adb4b6..00000000000 --- a/manifests/kustomize/base/metadata/base/metadata-envoy-configmap.yaml +++ /dev/null @@ -1,77 +0,0 @@ -apiVersion: v1 -kind: ConfigMap -metadata: - name: metadata-envoy-configmap -data: - envoy-config.yaml: |- - admin: - access_log: - name: admin_access - typed_config: - "@type": type.googleapis.com/envoy.extensions.access_loggers.file.v3.FileAccessLog - path: /tmp/admin_access.log - address: - socket_address: { address: 0.0.0.0, port_value: 9901 } - - static_resources: - listeners: - - name: listener_0 - address: - socket_address: { address: 0.0.0.0, port_value: 9090 } - filter_chains: - - filters: - - name: envoy.filters.network.http_connection_manager - typed_config: - "@type": type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager - codec_type: auto - stat_prefix: ingress_http - route_config: - name: local_route - virtual_hosts: - - name: local_service - domains: [ "*" ] - routes: - - match: { prefix: "/" } - route: - cluster: metadata-cluster - max_stream_duration: - grpc_timeout_header_max: '0s' - typed_per_filter_config: - envoy.filter.http.cors: - "@type": type.googleapis.com/envoy.extensions.filters.http.cors.v3.CorsPolicy - allow_origin_string_match: - - safe_regex: - regex: ".*" - allow_methods: GET, PUT, DELETE, POST, OPTIONS - allow_headers: keep-alive,user-agent,cache-control,content-type,content-transfer-encoding,custom-header-1,x-accept-content-transfer-encoding,x-accept-response-streaming,x-user-agent,x-grpc-web,grpc-timeout - max_age: "1728000" - expose_headers: custom-header-1,grpc-status,grpc-message - http_filters: - - name: envoy.filters.http.grpc_web - typed_config: - "@type": type.googleapis.com/envoy.extensions.filters.http.grpc_web.v3.GrpcWeb - - name: envoy.filters.http.cors - typed_config: - "@type": 
type.googleapis.com/envoy.extensions.filters.http.cors.v3.Cors - - name: envoy.filters.http.router - typed_config: - "@type": type.googleapis.com/envoy.extensions.filters.http.router.v3.Router - clusters: - - name: metadata-cluster - connect_timeout: 30.0s - type: logical_dns - typed_extension_protocol_options: - envoy.extensions.upstreams.http.v3.HttpProtocolOptions: - "@type": type.googleapis.com/envoy.extensions.upstreams.http.v3.HttpProtocolOptions - explicit_http_config: - http2_protocol_options: { } - lb_policy: round_robin - load_assignment: - cluster_name: metadata-grpc - endpoints: - - lb_endpoints: - - endpoint: - address: - socket_address: - address: metadata-grpc-service - port_value: 8080 diff --git a/manifests/kustomize/base/metadata/base/metadata-envoy-deployment.yaml b/manifests/kustomize/base/metadata/base/metadata-envoy-deployment.yaml deleted file mode 100644 index 73a0508b673..00000000000 --- a/manifests/kustomize/base/metadata/base/metadata-envoy-deployment.yaml +++ /dev/null @@ -1,43 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: metadata-envoy-deployment - labels: - component: metadata-envoy -spec: - replicas: 1 - selector: - matchLabels: - component: metadata-envoy - template: - metadata: - labels: - component: metadata-envoy - sidecar.istio.io/inject: "false" - spec: - containers: - - name: container - image: ghcr.io/kubeflow/kfp-metadata-envoy:dummy - args: [ "/etc/envoy/envoy-config.yaml" ] - ports: - - name: md-envoy - containerPort: 9090 - - name: envoy-admin - containerPort: 9901 - securityContext: - allowPrivilegeEscalation: false - seccompProfile: - type: RuntimeDefault - runAsNonRoot: true - runAsUser: 1000 - runAsGroup: 0 - capabilities: - drop: - - ALL - volumeMounts: - - name: envoy-config - mountPath: /etc/envoy - volumes: - - name: envoy-config - configMap: - name: metadata-envoy-configmap diff --git a/manifests/kustomize/base/metadata/base/metadata-envoy-service.yaml b/manifests/kustomize/base/metadata/base/metadata-envoy-service.yaml deleted file mode 100644 index 42166c85ccd..00000000000 --- a/manifests/kustomize/base/metadata/base/metadata-envoy-service.yaml +++ /dev/null @@ -1,14 +0,0 @@ -kind: Service -apiVersion: v1 -metadata: - labels: - app: metadata-envoy - name: metadata-envoy-service -spec: - selector: - component: metadata-envoy - type: ClusterIP - ports: - - port: 9090 - protocol: TCP - name: md-envoy diff --git a/manifests/kustomize/base/metadata/base/metadata-grpc-configmap.yaml b/manifests/kustomize/base/metadata/base/metadata-grpc-configmap.yaml deleted file mode 100644 index 08cc7e69278..00000000000 --- a/manifests/kustomize/base/metadata/base/metadata-grpc-configmap.yaml +++ /dev/null @@ -1,9 +0,0 @@ -apiVersion: v1 -kind: ConfigMap -metadata: - name: metadata-grpc-configmap - labels: - component: metadata-grpc-server -data: - METADATA_GRPC_SERVICE_HOST: "metadata-grpc-service" - METADATA_GRPC_SERVICE_PORT: "8080" diff --git a/manifests/kustomize/base/metadata/base/metadata-grpc-deployment.yaml b/manifests/kustomize/base/metadata/base/metadata-grpc-deployment.yaml deleted file mode 100644 index d367b97dd7b..00000000000 --- a/manifests/kustomize/base/metadata/base/metadata-grpc-deployment.yaml +++ /dev/null @@ -1,91 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: metadata-grpc-deployment - labels: - component: metadata-grpc-server -spec: - replicas: 1 - selector: - matchLabels: - component: metadata-grpc-server - template: - metadata: - labels: - component: metadata-grpc-server - spec: - 
securityContext: - seccompProfile: - type: RuntimeDefault - containers: - - name: container - # ! Sync to the same MLMD version: - # * backend/metadata_writer/requirements.in and requirements.txt - # * @kubeflow/frontend/src/mlmd/generated - # * .cloudbuild.yaml and .release.cloudbuild.yaml - # * manifests/kustomize/base/metadata/base/metadata-grpc-deployment.yaml - # * test/tag_for_hosted.sh - image: gcr.io/tfx-oss-public/ml_metadata_store_server:1.14.0 - securityContext: - allowPrivilegeEscalation: false - seccompProfile: - type: RuntimeDefault - runAsNonRoot: true - runAsUser: 1000 - runAsGroup: 0 - capabilities: - drop: - - ALL - env: - - name: DBCONFIG_USER - valueFrom: - secretKeyRef: - name: mysql-secret - key: username - - name: DBCONFIG_PASSWORD - valueFrom: - secretKeyRef: - name: mysql-secret - key: password - - name: MYSQL_DATABASE - valueFrom: - configMapKeyRef: - name: pipeline-install-config - key: mlmdDb - - name: MYSQL_HOST - valueFrom: - configMapKeyRef: - name: pipeline-install-config - key: dbHost - - name: MYSQL_PORT - valueFrom: - configMapKeyRef: - name: pipeline-install-config - key: dbPort - command: ["/bin/metadata_store_server"] - args: ["--grpc_port=8080", - "--mysql_config_database=$(MYSQL_DATABASE)", - "--mysql_config_host=$(MYSQL_HOST)", - "--mysql_config_port=$(MYSQL_PORT)", - "--mysql_config_user=$(DBCONFIG_USER)", - "--mysql_config_password=$(DBCONFIG_PASSWORD)", - "--enable_database_upgrade=true", - "--grpc_channel_arguments=grpc.max_metadata_size=16384" - ] - ports: - - name: grpc-api - containerPort: 8080 - livenessProbe: - tcpSocket: - port: grpc-api - initialDelaySeconds: 3 - periodSeconds: 5 - timeoutSeconds: 2 - readinessProbe: - tcpSocket: - port: grpc-api - initialDelaySeconds: 3 - periodSeconds: 5 - timeoutSeconds: 2 - serviceAccountName: metadata-grpc-server - diff --git a/manifests/kustomize/base/metadata/base/metadata-grpc-sa.yaml b/manifests/kustomize/base/metadata/base/metadata-grpc-sa.yaml deleted file mode 100644 index c8e8d1fc86d..00000000000 --- a/manifests/kustomize/base/metadata/base/metadata-grpc-sa.yaml +++ /dev/null @@ -1,4 +0,0 @@ -apiVersion: v1 -kind: ServiceAccount -metadata: - name: metadata-grpc-server diff --git a/manifests/kustomize/base/metadata/base/metadata-grpc-service.yaml b/manifests/kustomize/base/metadata/base/metadata-grpc-service.yaml deleted file mode 100644 index 7e7b73bf02d..00000000000 --- a/manifests/kustomize/base/metadata/base/metadata-grpc-service.yaml +++ /dev/null @@ -1,14 +0,0 @@ -kind: Service -apiVersion: v1 -metadata: - labels: - app: metadata - name: metadata-grpc-service -spec: - selector: - component: metadata-grpc-server - type: ClusterIP - ports: - - port: 8080 - protocol: TCP - name: grpc-api diff --git a/manifests/kustomize/base/metadata/options/istio/destination-rule.yaml b/manifests/kustomize/base/metadata/options/istio/destination-rule.yaml deleted file mode 100644 index e2ce8ca9720..00000000000 --- a/manifests/kustomize/base/metadata/options/istio/destination-rule.yaml +++ /dev/null @@ -1,9 +0,0 @@ -apiVersion: networking.istio.io/v1alpha3 -kind: DestinationRule -metadata: - name: metadata-grpc-service -spec: - host: metadata-grpc-service.kubeflow.svc.cluster.local - trafficPolicy: - tls: - mode: ISTIO_MUTUAL diff --git a/manifests/kustomize/base/metadata/options/istio/istio-authorization-policy.yaml b/manifests/kustomize/base/metadata/options/istio/istio-authorization-policy.yaml deleted file mode 100644 index 8f41789e8ee..00000000000 --- 
a/manifests/kustomize/base/metadata/options/istio/istio-authorization-policy.yaml +++ /dev/null @@ -1,11 +0,0 @@ -apiVersion: security.istio.io/v1beta1 -kind: AuthorizationPolicy -metadata: - name: metadata-grpc-service -spec: - action: ALLOW - selector: - matchLabels: - component: metadata-grpc-server - rules: - - {} diff --git a/manifests/kustomize/base/metadata/options/istio/kustomization.yaml b/manifests/kustomize/base/metadata/options/istio/kustomization.yaml deleted file mode 100644 index 029a6937cad..00000000000 --- a/manifests/kustomize/base/metadata/options/istio/kustomization.yaml +++ /dev/null @@ -1,7 +0,0 @@ -apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization - -resources: -- istio-authorization-policy.yaml -- destination-rule.yaml -- virtual-service.yaml diff --git a/manifests/kustomize/base/metadata/options/istio/virtual-service.yaml b/manifests/kustomize/base/metadata/options/istio/virtual-service.yaml deleted file mode 100644 index 01e000b65c0..00000000000 --- a/manifests/kustomize/base/metadata/options/istio/virtual-service.yaml +++ /dev/null @@ -1,21 +0,0 @@ -apiVersion: networking.istio.io/v1alpha3 -kind: VirtualService -metadata: - name: metadata-grpc - namespace: kubeflow -spec: - gateways: - - kubeflow-gateway - hosts: - - '*' - http: - - match: - - uri: - prefix: /ml_metadata - rewrite: - uri: /ml_metadata - route: - - destination: - host: metadata-envoy-service.kubeflow.svc.cluster.local - port: - number: 9090 diff --git a/manifests/kustomize/base/metadata/overlays/db/kustomization.yaml b/manifests/kustomize/base/metadata/overlays/db/kustomization.yaml deleted file mode 100644 index 729f16eaa30..00000000000 --- a/manifests/kustomize/base/metadata/overlays/db/kustomization.yaml +++ /dev/null @@ -1,38 +0,0 @@ -apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization -namespace: kubeflow - -resources: -- ../../base -- metadata-db-pvc.yaml -- metadata-db-deployment.yaml -- metadata-db-service.yaml - -patches: -- path: patches/metadata-grpc-deployment.yaml - -configMapGenerator: -- envs: - - params.env - name: metadata-db-parameters -secretGenerator: -- envs: - - secrets.env - name: metadata-db-secrets -generatorOptions: - disableNameSuffixHash: true - - -images: -- name: mysql - newName: mysql - newTag: 8.0.3 - -vars: -- fieldref: - fieldPath: metadata.name - name: MLMD_DB_HOST - objref: - apiVersion: v1 - kind: Service - name: metadata-db diff --git a/manifests/kustomize/base/metadata/overlays/db/metadata-db-deployment.yaml b/manifests/kustomize/base/metadata/overlays/db/metadata-db-deployment.yaml deleted file mode 100644 index 3101f5d2107..00000000000 --- a/manifests/kustomize/base/metadata/overlays/db/metadata-db-deployment.yaml +++ /dev/null @@ -1,50 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: metadata-db - labels: - component: db -spec: - selector: - matchLabels: - component: db - replicas: 1 - strategy: - type: Recreate - template: - metadata: - name: db - labels: - component: db - sidecar.istio.io/inject: "false" - spec: - containers: - - name: db-container - image: mysql:8.0.3 - args: - - --datadir - - /var/lib/mysql/datadir - envFrom: - - configMapRef: - name: metadata-db-parameters - - secretRef: - name: metadata-db-secrets - ports: - - name: dbapi - containerPort: 3306 - readinessProbe: - exec: - command: - - "/bin/bash" - - "-c" - - "mysql -D $$MYSQL_DATABASE -p$$MYSQL_ROOT_PASSWORD -e 'SELECT 1'" - initialDelaySeconds: 5 - periodSeconds: 2 - timeoutSeconds: 1 - volumeMounts: - - name: metadata-mysql - mountPath: 
/var/lib/mysql - volumes: - - name: metadata-mysql - persistentVolumeClaim: - claimName: metadata-mysql diff --git a/manifests/kustomize/base/metadata/overlays/db/metadata-db-pvc.yaml b/manifests/kustomize/base/metadata/overlays/db/metadata-db-pvc.yaml deleted file mode 100644 index b1c083d9f45..00000000000 --- a/manifests/kustomize/base/metadata/overlays/db/metadata-db-pvc.yaml +++ /dev/null @@ -1,10 +0,0 @@ -apiVersion: v1 -kind: PersistentVolumeClaim -metadata: - name: metadata-mysql -spec: - accessModes: - - ReadWriteOnce - resources: - requests: - storage: 10Gi diff --git a/manifests/kustomize/base/metadata/overlays/db/metadata-db-service.yaml b/manifests/kustomize/base/metadata/overlays/db/metadata-db-service.yaml deleted file mode 100644 index b7a6401714a..00000000000 --- a/manifests/kustomize/base/metadata/overlays/db/metadata-db-service.yaml +++ /dev/null @@ -1,14 +0,0 @@ -apiVersion: v1 -kind: Service -metadata: - name: metadata-db - labels: - component: db -spec: - type: ClusterIP - ports: - - port: 3306 - protocol: TCP - name: dbapi - selector: - component: db diff --git a/manifests/kustomize/base/metadata/overlays/db/params.env b/manifests/kustomize/base/metadata/overlays/db/params.env deleted file mode 100644 index 5ab2adb3bb5..00000000000 --- a/manifests/kustomize/base/metadata/overlays/db/params.env +++ /dev/null @@ -1,3 +0,0 @@ -MYSQL_DATABASE=metadb -MYSQL_PORT=3306 -MYSQL_ALLOW_EMPTY_PASSWORD=true \ No newline at end of file diff --git a/manifests/kustomize/base/metadata/overlays/db/patches/metadata-grpc-deployment.yaml b/manifests/kustomize/base/metadata/overlays/db/patches/metadata-grpc-deployment.yaml deleted file mode 100644 index 03023a3404c..00000000000 --- a/manifests/kustomize/base/metadata/overlays/db/patches/metadata-grpc-deployment.yaml +++ /dev/null @@ -1,25 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: metadata-grpc-deployment -spec: - template: - spec: - containers: - - name: container - # Remove existing environment variables - env: - - $patch: replace - envFrom: - - configMapRef: - name: metadata-db-parameters - - secretRef: - name: metadata-db-secrets - - configMapRef: - name: metadata-grpc-configmap - args: ["--grpc_port=$(METADATA_GRPC_SERVICE_PORT)", - "--mysql_config_host=$(MLMD_DB_HOST)", - "--mysql_config_database=$(MYSQL_DATABASE)", - "--mysql_config_port=$(MYSQL_PORT)", - "--mysql_config_user=$(MYSQL_USER_NAME)", - "--mysql_config_password=$(MYSQL_ROOT_PASSWORD)"] diff --git a/manifests/kustomize/base/metadata/overlays/db/secrets.env b/manifests/kustomize/base/metadata/overlays/db/secrets.env deleted file mode 100644 index 44ac2ee3980..00000000000 --- a/manifests/kustomize/base/metadata/overlays/db/secrets.env +++ /dev/null @@ -1,2 +0,0 @@ -MYSQL_USER_NAME=root -MYSQL_ROOT_PASSWORD=test \ No newline at end of file diff --git a/manifests/kustomize/base/metadata/overlays/postgres/kustomization.yaml b/manifests/kustomize/base/metadata/overlays/postgres/kustomization.yaml deleted file mode 100644 index f975b8f522b..00000000000 --- a/manifests/kustomize/base/metadata/overlays/postgres/kustomization.yaml +++ /dev/null @@ -1,37 +0,0 @@ -apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization -namespace: kubeflow - -resources: -- ../../base -- metadata-db-pvc.yaml -- metadata-db-deployment.yaml -- metadata-db-service.yaml - -patches: -- path: patches/metadata-grpc-deployment.yaml - -configMapGenerator: -- envs: - - params.env - name: metadata-postgres-db-parameters -secretGenerator: -- envs: - - secrets.env - name: 
metadata-postgres-db-secrets -generatorOptions: - disableNameSuffixHash: true - -images: -- name: postgres - newName: postgres - newTag: 14.7-alpine3.17 - -vars: -- fieldref: - fieldPath: metadata.name - name: MLMD_DB_HOST - objref: - apiVersion: v1 - kind: Service - name: metadata-postgres-db diff --git a/manifests/kustomize/base/metadata/overlays/postgres/metadata-db-deployment.yaml b/manifests/kustomize/base/metadata/overlays/postgres/metadata-db-deployment.yaml deleted file mode 100644 index 41d2b5176a8..00000000000 --- a/manifests/kustomize/base/metadata/overlays/postgres/metadata-db-deployment.yaml +++ /dev/null @@ -1,41 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: metadata-postgres-db - labels: - component: db -spec: - selector: - matchLabels: - component: db - replicas: 1 - strategy: - type: Recreate - template: - metadata: - name: db - labels: - component: db - sidecar.istio.io/inject: "false" - spec: - containers: - - name: db-container - image: postgres - env: - - name: PGDATA - value: /var/lib/postgresql/data/pgdata - envFrom: - - configMapRef: - name: metadata-postgres-db-parameters - - secretRef: - name: metadata-postgres-db-secrets - ports: - - name: postgres - containerPort: 5432 - volumeMounts: - - name: metadata-postgres - mountPath: /var/lib/postgresql/data - volumes: - - name: metadata-postgres - persistentVolumeClaim: - claimName: metadata-postgres diff --git a/manifests/kustomize/base/metadata/overlays/postgres/metadata-db-pvc.yaml b/manifests/kustomize/base/metadata/overlays/postgres/metadata-db-pvc.yaml deleted file mode 100644 index 13790489fa9..00000000000 --- a/manifests/kustomize/base/metadata/overlays/postgres/metadata-db-pvc.yaml +++ /dev/null @@ -1,10 +0,0 @@ -apiVersion: v1 -kind: PersistentVolumeClaim -metadata: - name: metadata-postgres -spec: - accessModes: - - ReadWriteOnce - resources: - requests: - storage: 20Gi diff --git a/manifests/kustomize/base/metadata/overlays/postgres/metadata-db-service.yaml b/manifests/kustomize/base/metadata/overlays/postgres/metadata-db-service.yaml deleted file mode 100644 index 63902a66618..00000000000 --- a/manifests/kustomize/base/metadata/overlays/postgres/metadata-db-service.yaml +++ /dev/null @@ -1,14 +0,0 @@ -apiVersion: v1 -kind: Service -metadata: - name: metadata-postgres-db - labels: - component: db -spec: - type: ClusterIP - ports: - - port: 5432 - protocol: TCP - name: postgres - selector: - component: db diff --git a/manifests/kustomize/base/metadata/overlays/postgres/params.env b/manifests/kustomize/base/metadata/overlays/postgres/params.env deleted file mode 100644 index fce7e26772b..00000000000 --- a/manifests/kustomize/base/metadata/overlays/postgres/params.env +++ /dev/null @@ -1,2 +0,0 @@ -POSTGRES_PORT=5432 -POSTGRES_DBNAME=mlmdpostgres \ No newline at end of file diff --git a/manifests/kustomize/base/metadata/overlays/postgres/patches/metadata-grpc-deployment.yaml b/manifests/kustomize/base/metadata/overlays/postgres/patches/metadata-grpc-deployment.yaml deleted file mode 100644 index 9f3c052b7b6..00000000000 --- a/manifests/kustomize/base/metadata/overlays/postgres/patches/metadata-grpc-deployment.yaml +++ /dev/null @@ -1,28 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: metadata-grpc-deployment -spec: - template: - spec: - containers: - - name: container - # Remove existing environment variables - env: - - $patch: replace - envFrom: - - configMapRef: - name: metadata-postgres-db-parameters - - secretRef: - name: metadata-postgres-db-secrets - - configMapRef: - 
name: metadata-grpc-configmap - args: ["--grpc_port=$(METADATA_GRPC_SERVICE_PORT)", - "--metadata_source_config_type=postgresql", - "--postgres_config_host=$(MLMD_DB_HOST)", - "--postgres_config_port=$(POSTGRES_PORT)", - "--postgres_config_dbname=$(POSTGRES_DBNAME)", - "--postgres_config_user=$(POSTGRES_USER)", - "--postgres_config_password=$(POSTGRES_PASSWORD)", - # "--postgres_config_skip_db_creation=true", - "--enable_database_upgrade=true"] diff --git a/manifests/kustomize/base/metadata/overlays/postgres/secrets.env b/manifests/kustomize/base/metadata/overlays/postgres/secrets.env deleted file mode 100644 index 973d1582830..00000000000 --- a/manifests/kustomize/base/metadata/overlays/postgres/secrets.env +++ /dev/null @@ -1,2 +0,0 @@ -POSTGRES_USER=root -POSTGRES_PASSWORD=password \ No newline at end of file diff --git a/manifests/kustomize/base/pipeline/allow-same-namespace-networkpolicy.yaml b/manifests/kustomize/base/pipeline/allow-same-namespace-networkpolicy.yaml index 91ba34c619c..6eb9f34a3ff 100644 --- a/manifests/kustomize/base/pipeline/allow-same-namespace-networkpolicy.yaml +++ b/manifests/kustomize/base/pipeline/allow-same-namespace-networkpolicy.yaml @@ -8,5 +8,14 @@ spec: ingress: - from: - podSelector: {} + # Allow pods in the profile namespace (namely launcher/driver pods) to communicate with + # the API server. + - from: + - namespaceSelector: + matchLabels: + app.kubernetes.io/part-of: kubeflow-profile + ports: + - protocol: TCP + port: 8887 policyTypes: - Ingress diff --git a/manifests/kustomize/base/pipeline/kustomization.yaml b/manifests/kustomize/base/pipeline/kustomization.yaml index e17b443a0d5..9b6c63bdf13 100644 --- a/manifests/kustomize/base/pipeline/kustomization.yaml +++ b/manifests/kustomize/base/pipeline/kustomization.yaml @@ -1,7 +1,6 @@ apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization resources: - - metadata-writer - ml-pipeline-apiserver-deployment.yaml - ml-pipeline-apiserver-role.yaml - ml-pipeline-apiserver-rolebinding.yaml diff --git a/manifests/kustomize/base/pipeline/metadata-writer/kustomization.yaml b/manifests/kustomize/base/pipeline/metadata-writer/kustomization.yaml deleted file mode 100644 index 18f17463ec0..00000000000 --- a/manifests/kustomize/base/pipeline/metadata-writer/kustomization.yaml +++ /dev/null @@ -1,10 +0,0 @@ -apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization -resources: - - metadata-writer-deployment.yaml - - metadata-writer-role.yaml - - metadata-writer-rolebinding.yaml - - metadata-writer-sa.yaml -images: - - name: ghcr.io/kubeflow/kfp-metadata-writer - newTag: 2.14.3 diff --git a/manifests/kustomize/base/pipeline/metadata-writer/metadata-writer-deployment.yaml b/manifests/kustomize/base/pipeline/metadata-writer/metadata-writer-deployment.yaml deleted file mode 100644 index 0fa90266a02..00000000000 --- a/manifests/kustomize/base/pipeline/metadata-writer/metadata-writer-deployment.yaml +++ /dev/null @@ -1,37 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: metadata-writer - labels: - app: metadata-writer -spec: - replicas: 1 - selector: - matchLabels: - app: metadata-writer - template: - metadata: - labels: - app: metadata-writer - spec: - securityContext: - seccompProfile: - type: RuntimeDefault - - containers: - - name: main - image: ghcr.io/kubeflow/kfp-metadata-writer:dummy - env: - - name: NAMESPACE_TO_WATCH - valueFrom: - fieldRef: - fieldPath: metadata.namespace - securityContext: - allowPrivilegeEscalation: false - runAsNonRoot: true - runAsUser: 1000 - runAsGroup: 0 -
capabilities: - drop: - - ALL - serviceAccountName: kubeflow-pipelines-metadata-writer diff --git a/manifests/kustomize/base/pipeline/metadata-writer/metadata-writer-role.yaml b/manifests/kustomize/base/pipeline/metadata-writer/metadata-writer-role.yaml deleted file mode 100644 index 06317b0845c..00000000000 --- a/manifests/kustomize/base/pipeline/metadata-writer/metadata-writer-role.yaml +++ /dev/null @@ -1,33 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: Role -metadata: - labels: - app: kubeflow-pipelines-metadata-writer-role - name: kubeflow-pipelines-metadata-writer-role -rules: -- apiGroups: - - "" - resources: - - pods - verbs: - - get - - list - - watch - - update - - patch -- apiGroups: - - "" - resources: - - configmaps - verbs: - - get -- apiGroups: - - argoproj.io - resources: - - workflows - verbs: - - get - - list - - watch - - update - - patch diff --git a/manifests/kustomize/base/pipeline/metadata-writer/metadata-writer-rolebinding.yaml b/manifests/kustomize/base/pipeline/metadata-writer/metadata-writer-rolebinding.yaml deleted file mode 100644 index 5a6c1fef249..00000000000 --- a/manifests/kustomize/base/pipeline/metadata-writer/metadata-writer-rolebinding.yaml +++ /dev/null @@ -1,11 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: RoleBinding -metadata: - name: kubeflow-pipelines-metadata-writer-binding -roleRef: - apiGroup: rbac.authorization.k8s.io - kind: Role - name: kubeflow-pipelines-metadata-writer-role -subjects: -- kind: ServiceAccount - name: kubeflow-pipelines-metadata-writer \ No newline at end of file diff --git a/manifests/kustomize/base/pipeline/metadata-writer/metadata-writer-sa.yaml b/manifests/kustomize/base/pipeline/metadata-writer/metadata-writer-sa.yaml deleted file mode 100644 index 77812949a84..00000000000 --- a/manifests/kustomize/base/pipeline/metadata-writer/metadata-writer-sa.yaml +++ /dev/null @@ -1,4 +0,0 @@ -apiVersion: v1 -kind: ServiceAccount -metadata: - name: kubeflow-pipelines-metadata-writer diff --git a/manifests/kustomize/base/pipeline/pipeline-runner-role.yaml b/manifests/kustomize/base/pipeline/pipeline-runner-role.yaml index eba0ee9f2d6..b86e4ecc13a 100644 --- a/manifests/kustomize/base/pipeline/pipeline-runner-role.yaml +++ b/manifests/kustomize/base/pipeline/pipeline-runner-role.yaml @@ -85,3 +85,19 @@ rules: verbs: - create - patch +- apiGroups: + - pipelines.kubeflow.org + resources: + - runs + verbs: + - get + - list + - update +- apiGroups: + - pipelines.kubeflow.org + resources: + - artifacts + verbs: + - get + - list + - create \ No newline at end of file diff --git a/manifests/kustomize/env/cert-manager/base-tls-certs/kfp-api-cert.yaml b/manifests/kustomize/env/cert-manager/base-tls-certs/kfp-api-cert.yaml index d01ffcf3f98..4785ca3e91a 100644 --- a/manifests/kustomize/env/cert-manager/base-tls-certs/kfp-api-cert.yaml +++ b/manifests/kustomize/env/cert-manager/base-tls-certs/kfp-api-cert.yaml @@ -9,10 +9,6 @@ spec: - ml-pipeline - ml-pipeline.kubeflow - ml-pipeline-scheduledworkflow - - metadata-envoy - - metadata-envoy-service - - metadata-grpc-service - - metadata-grpc-service.kubeflow - localhost ipAddresses: # Necessary for running TLS-enabled cluster locally. 
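The new `pipelines.kubeflow.org` rules added to `pipeline-runner-role.yaml` above can be sanity-checked with a `SubjectAccessReview`. The following Go sketch is illustrative only and not part of this PR; the `kubeflow` namespace and `pipeline-runner` ServiceAccount names are assumed defaults, so adjust them for your install.

```go
// Hypothetical sanity check: ask the API server whether the pipeline-runner
// ServiceAccount may "list" the new runs.pipelines.kubeflow.org resource.
package main

import (
	"context"
	"fmt"

	authorizationv1 "k8s.io/api/authorization/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Load the local kubeconfig (~/.kube/config).
	config, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	clientset := kubernetes.NewForConfigOrDie(config)

	sar := &authorizationv1.SubjectAccessReview{
		Spec: authorizationv1.SubjectAccessReviewSpec{
			// Assumed SA name; the Role above is bound to the pipeline runner SA.
			User: "system:serviceaccount:kubeflow:pipeline-runner",
			ResourceAttributes: &authorizationv1.ResourceAttributes{
				Namespace: "kubeflow",
				Group:     "pipelines.kubeflow.org",
				Resource:  "runs",
				Verb:      "list",
			},
		},
	}
	resp, err := clientset.AuthorizationV1().SubjectAccessReviews().Create(
		context.Background(), sar, metav1.CreateOptions{})
	if err != nil {
		panic(err)
	}
	fmt.Printf("allowed=%v reason=%q\n", resp.Status.Allowed, resp.Status.Reason)
}
```

The same check with `Resource: "artifacts"` and the `get`/`list`/`create` verbs covers the second rule.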
diff --git a/manifests/kustomize/env/cert-manager/platform-agnostic-standalone-tls/README.md b/manifests/kustomize/env/cert-manager/platform-agnostic-standalone-tls/README.md index 1c8a5eefc25..fb1d1512713 100644 --- a/manifests/kustomize/env/cert-manager/platform-agnostic-standalone-tls/README.md +++ b/manifests/kustomize/env/cert-manager/platform-agnostic-standalone-tls/README.md @@ -9,9 +9,5 @@ The Scheduledworkflow Controller is mounted with the TLS cert CA and sends HTTPS The Persistence Agent is mounted with the TLS cert CA and sends HTTPS requests to the API server. ### KFP UI The UI deployment is mounted with the TLS cert CA and sends HTTPS requests to the metadata-envoy deployment and the API server. -### Metadata-Envoy -The Metadata Envoy deployment is mounted with TLS key/cert and serves requests over TLS -### Metadata-gRPC -The Metadata gRPC deployment is mounted with TLS key/cert and serves requests over TLS ### Driver & Launcher The driver and launcher pods are configured with the TLS cert CA in addition to system CAs in order to send HTTPS requests to the both the API server and metadata-gRPC deployment. \ No newline at end of file diff --git a/manifests/kustomize/env/cert-manager/platform-agnostic-standalone-tls/kustomization.yaml b/manifests/kustomize/env/cert-manager/platform-agnostic-standalone-tls/kustomization.yaml index dbd6a30d394..fda7bf4571f 100644 --- a/manifests/kustomize/env/cert-manager/platform-agnostic-standalone-tls/kustomization.yaml +++ b/manifests/kustomize/env/cert-manager/platform-agnostic-standalone-tls/kustomization.yaml @@ -5,7 +5,6 @@ resources: - ../../../base/crds - ../../platform-agnostic - ../base-tls-certs/ - - patches/metadata-envoy-configmap.yaml namespace: kubeflow @@ -22,22 +21,6 @@ patches: target: kind: Deployment name: ml-pipeline-scheduledworkflow - - path: patches/metadata-envoy-deployment.yaml - target: - kind: Deployment - name: metadata-envoy-deployment - - path: patches/metadata-envoy-service.yaml - target: - kind: Service - name: metadata-envoy-service - - path: patches/metadata-grpc-deployment.yaml - target: - kind: Deployment - name: metadata-grpc-deployment - - path: patches/metadata-writer-deployment.yaml - target: - kind: Deployment - name: metadata-writer - path: patches/ml-pipeline-persistenceagent-deployment.yaml target: kind: Deployment diff --git a/manifests/kustomize/env/cert-manager/platform-agnostic-standalone-tls/patches/metadata-envoy-configmap.yaml b/manifests/kustomize/env/cert-manager/platform-agnostic-standalone-tls/patches/metadata-envoy-configmap.yaml deleted file mode 100644 index 1cc6622dd00..00000000000 --- a/manifests/kustomize/env/cert-manager/platform-agnostic-standalone-tls/patches/metadata-envoy-configmap.yaml +++ /dev/null @@ -1,97 +0,0 @@ -apiVersion: v1 -kind: ConfigMap -metadata: - name: envoy-config -data: - envoy-config.yaml: |- - admin: - access_log: - name: admin_access - typed_config: - "@type": type.googleapis.com/envoy.extensions.access_loggers.file.v3.FileAccessLog - path: /tmp/admin_access.log - address: - socket_address: { address: 0.0.0.0, port_value: 9901 } - - static_resources: - listeners: - - name: listener_0 - address: - socket_address: { address: 0.0.0.0, port_value: 9090 } - filter_chains: - - transport_socket: - name: envoy.transport_sockets.tls - typed_config: - "@type": type.googleapis.com/envoy.extensions.transport_sockets.tls.v3.DownstreamTlsContext - common_tls_context: - tls_certificates: - - certificate_chain: - filename: /etc/envoy/tls/tls.crt - private_key: - filename: 
/etc/envoy/tls/tls.key - alpn_protocols: [ "h2", "http/1.1" ] - require_client_certificate: false - filters: - - name: envoy.filters.network.http_connection_manager - typed_config: - "@type": type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager - codec_type: auto - stat_prefix: ingress_http - route_config: - name: local_route - virtual_hosts: - - name: local_service - domains: [ "*" ] - routes: - - match: { prefix: "/" } - route: - cluster: metadata-cluster - max_stream_duration: - grpc_timeout_header_max: '0s' - typed_per_filter_config: - envoy.filter.http.cors: - "@type": type.googleapis.com/envoy.extensions.filters.http.cors.v3.CorsPolicy - allow_origin_string_match: - - safe_regex: { regex: ".*" } - allow_methods: GET, PUT, DELETE, POST, OPTIONS - allow_headers: keep-alive,user-agent,cache-control,content-type,content-transfer-encoding,custom-header-1,x-accept-content-transfer-encoding,x-accept-response-streaming,x-user-agent,x-grpc-web,grpc-timeout - max_age: "1728000" - expose_headers: custom-header-1,grpc-status,grpc-message - http_filters: - - name: envoy.filters.http.grpc_web - typed_config: - "@type": type.googleapis.com/envoy.extensions.filters.http.grpc_web.v3.GrpcWeb - - name: envoy.filters.http.cors - typed_config: - "@type": type.googleapis.com/envoy.extensions.filters.http.cors.v3.Cors - - name: envoy.filters.http.router - typed_config: - "@type": type.googleapis.com/envoy.extensions.filters.http.router.v3.Router - clusters: - - name: metadata-cluster - connect_timeout: 30.0s - type: logical_dns - lb_policy: round_robin - load_assignment: - cluster_name: metadata-grpc - endpoints: - - lb_endpoints: - - endpoint: - address: - socket_address: - address: metadata-grpc-service - port_value: 8080 - typed_extension_protocol_options: - envoy.extensions.upstreams.http.v3.HttpProtocolOptions: - "@type": type.googleapis.com/envoy.extensions.upstreams.http.v3.HttpProtocolOptions - explicit_http_config: - http2_protocol_options: { } - transport_socket: - name: envoy.transport_sockets.tls - typed_config: - "@type": type.googleapis.com/envoy.extensions.transport_sockets.tls.v3.UpstreamTlsContext - sni: metadata-grpc-service.kubeflow.svc - common_tls_context: - validation_context: - trusted_ca: - filename: /etc/envoy/tls/ca.crt diff --git a/manifests/kustomize/env/cert-manager/platform-agnostic-standalone-tls/patches/metadata-envoy-deployment.yaml b/manifests/kustomize/env/cert-manager/platform-agnostic-standalone-tls/patches/metadata-envoy-deployment.yaml deleted file mode 100644 index 0fa72587d37..00000000000 --- a/manifests/kustomize/env/cert-manager/platform-agnostic-standalone-tls/patches/metadata-envoy-deployment.yaml +++ /dev/null @@ -1,26 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: metadata-envoy-deployment - labels: - component: metadata-envoy -spec: - template: - spec: - containers: - - name: container - args: [ "/etc/envoy/envoy-config.yaml" ] - volumeMounts: - - name: tls-certs - mountPath: /etc/envoy/tls - readOnly: true - - name: envoy-config-tls-enabled - mountPath: /etc/envoy - readOnly: true - volumes: - - name: tls-certs - secret: - secretName: kfp-api-tls-cert - - name: envoy-config-tls-enabled - configMap: - name: envoy-config diff --git a/manifests/kustomize/env/cert-manager/platform-agnostic-standalone-tls/patches/metadata-envoy-service.yaml b/manifests/kustomize/env/cert-manager/platform-agnostic-standalone-tls/patches/metadata-envoy-service.yaml deleted file mode 100644 index 
03e051fa616..00000000000 --- a/manifests/kustomize/env/cert-manager/platform-agnostic-standalone-tls/patches/metadata-envoy-service.yaml +++ /dev/null @@ -1,10 +0,0 @@ -kind: Service -apiVersion: v1 -metadata: - name: metadata-envoy-service -spec: - ports: - - name: tls - port: 9090 - protocol: TCP - appProtocol: https diff --git a/manifests/kustomize/env/cert-manager/platform-agnostic-standalone-tls/patches/metadata-grpc-config-writer.yaml b/manifests/kustomize/env/cert-manager/platform-agnostic-standalone-tls/patches/metadata-grpc-config-writer.yaml deleted file mode 100644 index 284edfce085..00000000000 --- a/manifests/kustomize/env/cert-manager/platform-agnostic-standalone-tls/patches/metadata-grpc-config-writer.yaml +++ /dev/null @@ -1,31 +0,0 @@ -apiVersion: v1 -kind: ConfigMap -metadata: - name: metadata-grpc-config-writer -data: - generate-config.sh: |- - #!/bin/sh - set -e - - escape() { - sed ':a;N;$!ba;s/\n/\\n/g' "$1" - } - - cat <<EOF >/etc/grpc/config.proto - connection_config { - mysql { - host: "${MYSQL_HOST}" - port: ${MYSQL_PORT} - database: "${MYSQL_DATABASE}" - user: "${DBCONFIG_USER}" - password: "${DBCONFIG_PASSWORD}" - } - } - ssl_config { - server_key: "$(escape /etc/pki/tls/certs/tls.key)" - server_cert: "$(escape /etc/pki/tls/certs/tls.crt)" - custom_ca: "$(escape /etc/pki/tls/certs/ca.crt)" - client_verify: false - } - - EOF diff --git a/manifests/kustomize/env/cert-manager/platform-agnostic-standalone-tls/patches/metadata-grpc-deployment.yaml b/manifests/kustomize/env/cert-manager/platform-agnostic-standalone-tls/patches/metadata-grpc-deployment.yaml deleted file mode 100644 index b22cd3a6166..00000000000 --- a/manifests/kustomize/env/cert-manager/platform-agnostic-standalone-tls/patches/metadata-grpc-deployment.yaml +++ /dev/null @@ -1,91 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: metadata-grpc-deployment - labels: - component: metadata-grpc-server -spec: - template: - spec: - volumes: - - name: tls-certs - secret: - secretName: kfp-api-tls-cert - - name: grpc-tls-config - emptyDir: { } - - name: mysql-secret - secret: - secretName: mysql-secret - - initContainers: - - name: generate-config - image: gcr.io/tfx-oss-public/ml_metadata_store_server:1.14.0 - imagePullPolicy: IfNotPresent - command: [ "/bin/sh", "-ec" ] - args: - - | - escape() { sed ':a;N;$!ba;s/\n/\\n/g' "$1"; } - cat <<EOF >/etc/grpc/config.proto - connection_config { - mysql { - host: "${MYSQL_HOST}" - port: ${MYSQL_PORT} - database: "${MYSQL_DATABASE}" - user: "${DBCONFIG_USER}" - password: "${DBCONFIG_PASSWORD}" - } - } - ssl_config { - server_key: "$(escape /etc/pki/tls/certs/tls.key)" - server_cert: "$(escape /etc/pki/tls/certs/tls.crt)" - custom_ca: "$(escape /etc/pki/tls/certs/ca.crt)" - client_verify: false - } - EOF - env: - - name: DBCONFIG_USER - valueFrom: - secretKeyRef: - name: mysql-secret - key: username - - name: DBCONFIG_PASSWORD - valueFrom: - secretKeyRef: - name: mysql-secret - key: password - - name: MYSQL_DATABASE - valueFrom: - configMapKeyRef: - name: pipeline-install-config - key: mlmdDb - - name: MYSQL_HOST - valueFrom: - configMapKeyRef: - name: pipeline-install-config - key: dbHost - - name: MYSQL_PORT - valueFrom: - configMapKeyRef: - name: pipeline-install-config - key: dbPort - volumeMounts: - - name: tls-certs - mountPath: /etc/pki/tls/certs - - name: grpc-tls-config - mountPath: /etc/grpc - - name: mysql-secret - mountPath: /etc/mysql-secret - - containers: - - name: container - args: - - "--grpc_port=8080" -
"--metadata_store_server_config_file=/etc/grpc/config.proto" - - "--enable_database_upgrade=true" - - "--grpc_channel_arguments=grpc.max_metadata_size=16384" - volumeMounts: - - name: tls-certs - mountPath: /etc/pki/tls/certs - readOnly: true - - name: grpc-tls-config - mountPath: /etc/grpc diff --git a/manifests/kustomize/env/cert-manager/platform-agnostic-standalone-tls/patches/metadata-writer-deployment.yaml b/manifests/kustomize/env/cert-manager/platform-agnostic-standalone-tls/patches/metadata-writer-deployment.yaml deleted file mode 100644 index cb8f23ba8a8..00000000000 --- a/manifests/kustomize/env/cert-manager/platform-agnostic-standalone-tls/patches/metadata-writer-deployment.yaml +++ /dev/null @@ -1,26 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: metadata-writer -spec: - template: - spec: - containers: - - name: main - env: - - name: METADATA_GRPC_SERVICE_SERVICE_HOST - value: "metadata-grpc-service.kubeflow" - - name: METADATA_GRPC_SERVICE_SERVICE_PORT - value: "8080" - - name: METADATA_TLS_ENABLED - value: "true" - - name: CA_CERT_PATH - value: "/etc/pki/tls/certs/ca.crt" - volumeMounts: - - name: tls-certs - mountPath: /etc/pki/tls/certs - readOnly: true - volumes: - - name: tls-certs - secret: - secretName: kfp-api-tls-cert diff --git a/manifests/kustomize/env/dev/kustomization.yaml b/manifests/kustomize/env/dev/kustomization.yaml index f2ed5ddabf1..d778aea11d7 100644 --- a/manifests/kustomize/env/dev/kustomization.yaml +++ b/manifests/kustomize/env/dev/kustomization.yaml @@ -21,10 +21,6 @@ images: newTag: master - name: ghcr.io/kubeflow/kfp-cache-server newTag: master -- name: ghcr.io/kubeflow/kfp-metadata-envoy - newTag: master -- name: ghcr.io/kubeflow/kfp-metadata-writer - newTag: master - name: ghcr.io/kubeflow/kfp-viewer-crd-controller newTag: master - name: ghcr.io/kubeflow/kfp-visualization-server diff --git a/manifests/kustomize/env/gcp/kustomization.yaml b/manifests/kustomize/env/gcp/kustomization.yaml index 65fcea315b3..3556a915343 100644 --- a/manifests/kustomize/env/gcp/kustomization.yaml +++ b/manifests/kustomize/env/gcp/kustomization.yaml @@ -6,7 +6,6 @@ resources: - ../../third-party/application - ../../base/application - ../../base/installs/generic -- ../../base/metadata/base - ../../third-party/argo/installs/namespace - inverse-proxy - cloudsql-proxy diff --git a/manifests/kustomize/env/openshift/base/kustomization.yaml b/manifests/kustomize/env/openshift/base/kustomization.yaml index 8e7237829aa..13ee511ffa3 100644 --- a/manifests/kustomize/env/openshift/base/kustomization.yaml +++ b/manifests/kustomize/env/openshift/base/kustomization.yaml @@ -37,24 +37,6 @@ patches: # Openshift provides its own default restricted SCC's # Thus the following can be omitted: - - path: patches/remove-sc.json - target: - group: apps - kind: Deployment - name: metadata-envoy-deployment - version: v1 - - path: patches/remove-sc.json - target: - group: apps - kind: Deployment - name: metadata-grpc-deployment - version: v1 - - path: patches/remove-sc.json - target: - group: apps - kind: Deployment - name: metadata-writer - version: v1 - path: patches/remove-sc.json target: group: apps diff --git a/manifests/kustomize/env/plain-multi-user/kustomization.yaml b/manifests/kustomize/env/plain-multi-user/kustomization.yaml index 5f1016d0288..bbae8e77751 100644 --- a/manifests/kustomize/env/plain-multi-user/kustomization.yaml +++ b/manifests/kustomize/env/plain-multi-user/kustomization.yaml @@ -3,8 +3,6 @@ kind: Kustomization resources: - 
../../base/installs/multi-user -- ../../base/metadata/base -- ../../base/metadata/options/istio - ../../third-party/mysql/base - ../../third-party/mysql/options/istio - ../../third-party/seaweedfs/istio diff --git a/manifests/kustomize/env/plain/kustomization.yaml b/manifests/kustomize/env/plain/kustomization.yaml index dcbc92514b3..8719e9eaa92 100644 --- a/manifests/kustomize/env/plain/kustomization.yaml +++ b/manifests/kustomize/env/plain/kustomization.yaml @@ -3,7 +3,6 @@ kind: Kustomization resources: - ../../base/installs/generic -- ../../base/metadata/base - ../../third-party/seaweedfs/base - ../../third-party/mysql/base diff --git a/manifests/kustomize/env/platform-agnostic-minio/kustomization.yaml b/manifests/kustomize/env/platform-agnostic-minio/kustomization.yaml index d8ba71c4a4e..dd4470c4544 100644 --- a/manifests/kustomize/env/platform-agnostic-minio/kustomization.yaml +++ b/manifests/kustomize/env/platform-agnostic-minio/kustomization.yaml @@ -5,7 +5,6 @@ kind: Kustomization resources: - ../../base/installs/generic -- ../../base/metadata/base - ../../third-party/argo/installs/namespace - ../../third-party/minio/base - ../../third-party/mysql/base diff --git a/manifests/kustomize/env/platform-agnostic-multi-user-minio/kustomization.yaml b/manifests/kustomize/env/platform-agnostic-multi-user-minio/kustomization.yaml index 895ee2254de..67e040121fa 100644 --- a/manifests/kustomize/env/platform-agnostic-multi-user-minio/kustomization.yaml +++ b/manifests/kustomize/env/platform-agnostic-multi-user-minio/kustomization.yaml @@ -6,8 +6,6 @@ kind: Kustomization resources: - ../../third-party/metacontroller/base - ../../base/installs/multi-user -- ../../base/metadata/base -- ../../base/metadata/options/istio - ../../third-party/argo/installs/cluster - ../../third-party/mysql/base - ../../third-party/mysql/options/istio diff --git a/manifests/kustomize/env/platform-agnostic-multi-user/kustomization.yaml b/manifests/kustomize/env/platform-agnostic-multi-user/kustomization.yaml index 014f327cc2a..101be4a711e 100644 --- a/manifests/kustomize/env/platform-agnostic-multi-user/kustomization.yaml +++ b/manifests/kustomize/env/platform-agnostic-multi-user/kustomization.yaml @@ -4,8 +4,6 @@ kind: Kustomization resources: - ../../third-party/metacontroller/base - ../../base/installs/multi-user -- ../../base/metadata/base -- ../../base/metadata/options/istio - ../../third-party/argo/installs/cluster - ../../third-party/mysql/base - ../../third-party/mysql/options/istio diff --git a/manifests/kustomize/env/platform-agnostic/kustomization.yaml b/manifests/kustomize/env/platform-agnostic/kustomization.yaml index e1e4cdd4779..fa55a268fdf 100644 --- a/manifests/kustomize/env/platform-agnostic/kustomization.yaml +++ b/manifests/kustomize/env/platform-agnostic/kustomization.yaml @@ -3,7 +3,6 @@ kind: Kustomization resources: - ../../base/installs/generic -- ../../base/metadata/base - ../../third-party/argo/installs/namespace - ../../third-party/seaweedfs/base - ../../third-party/mysql/base diff --git a/proposals/12147-mlmd-removal/design-details.md b/proposals/12147-mlmd-removal/design-details.md index 85a56ca4e50..35c255f1f7f 100644 --- a/proposals/12147-mlmd-removal/design-details.md +++ b/proposals/12147-mlmd-removal/design-details.md @@ -243,6 +243,32 @@ This is a good opportunity to also replace the endpoints used in `cacheDefaultEn Other changes that will be required in Launcher are mentioned elsewhere in the proposal (see [Caching](#caching), and [Metrics](#metrics) sections). 
+#### Importer + +##### Matching artifacts +Consider the following example: +* An artifact is imported via the DSL +* ReImport is set to false + +Previously, the launcher would search for a matching artifact; an artifact would match if: + +* All the artifact's fields (except the artifact ID) are equal +* The artifact is attributed to the same PipelineContext that the launcher is running in + +Once MLMD is removed and Artifacts hold a namespace property, instead of filtering on the context, an +artifact will match if (see the sketch below): + +* All the artifact's fields (except the artifact ID) are equal +* The artifact is in the same namespace that the launcher is running in + +##### Artifact names + +Artifacts will be given a new name as part of the artifact model. This will serve as the artifact's canonical name. +In the future the user will be able to specify the artifact name in the pipeline definition via the KFP SDK. + +For now, the artifact name is inferred from the output artifact name when the artifact is uploaded. +When the artifact is imported, the name is inferred from the base file name of the artifact URI. + ### Nested Pipelines There is no direct way to infer whether a Driver run is for a Nested execution, to accommodate this, there is a generic `DAG` task type provided to fit such cases. @@ -403,9 +429,13 @@ StorageStates ### Auth Considerations -The Driver/Launcher will be introducing a new `RunServerClient` and `ArtifactServerClient` using the `v2beta1`. All calls to this endpoint must be protected via SubjectAccessReview. The new server implementations can simply use `resourceManager.IsAuthorized(ctx, resourceAttributes)`, which is the standard everywhere else in KFP. All tasks/artifacts/metrics endpoints will be doing SAR on the `run` resource. If a user makes a REST request with a `verb` that matches their permissions on the `Run` KFP resource, they will be authorized to perform that action. +The Driver/Launcher will be introducing a new `RunServerClient` and `ArtifactServerClient` using the `v2beta1` API. All calls to these endpoints must be protected via SubjectAccessReview. The new server implementations can simply use `resourceManager.IsAuthorized(ctx, resourceAttributes)`, which is the standard everywhere else in KFP. +With regards to Artifacts, authorization will be handled in this way: -For example, if a user makes a request to `ListArtifactRequest`, they require `list` verb on the `Run` resource for that particular namespace. +* Artifacts in Runs: Users only need RBAC on the associated Run resource to access artifacts within a run. +* Get/List Artifacts: To perform Get or List operations on artifacts, users require RBAC on the “artifacts” resource. +* Reimport = false: For artifacts where Reimport is set to false, users need `get` RBAC on the “artifacts” resource within the artifact's originating namespace. Note that the pipeline runner SA will still require credentials from the user's namespace to actually download and upload artifacts from/to object store. +* UI Artifact Downloading: The UI will no longer directly download artifacts. Instead, a KFP server API endpoint will provide pre-signed URL download links. Authorization for these links will be handled via RBAC. Users must be able to `get` the artifact via its resource name, or `get` the run the artifact is a part of; either permission allows generating a pre-signed URL to download it. A few more notes: * the Driver/Launcher communicates with the KFP API Server via the CacheClient. This has no auth mechanism today and will need to be updated.
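The importer matching rule above is small enough to sketch. The following Go snippet is illustrative only: the `Artifact` struct mirrors a subset of the proposed `v2beta1` Artifact message rather than the generated client types, and `matchesForReimport` is a hypothetical helper, not an existing KFP function.

```go
package main

import "fmt"

// Artifact mirrors (a subset of) the proposed v2beta1 Artifact message.
// Field names are illustrative, not the generated client types.
type Artifact struct {
	ID        string
	Name      string
	URI       string
	Type      string
	Namespace string
}

// matchesForReimport reports whether an existing artifact can be reused by an
// importer with reimport=false: every field except the artifact ID must be
// equal, and the artifact must live in the namespace the launcher runs in
// (replacing the old PipelineContext attribution check).
func matchesForReimport(existing, candidate Artifact, launcherNamespace string) bool {
	return existing.Namespace == launcherNamespace &&
		existing.Name == candidate.Name &&
		existing.URI == candidate.URI &&
		existing.Type == candidate.Type &&
		existing.Namespace == candidate.Namespace
}

func main() {
	existing := Artifact{ID: "a-1", Name: "data.csv", URI: "s3://bucket/data.csv", Type: "system.Dataset", Namespace: "team-a"}
	imported := Artifact{Name: "data.csv", URI: "s3://bucket/data.csv", Type: "system.Dataset", Namespace: "team-a"}
	fmt.Println(matchesForReimport(existing, imported, "team-a")) // prints: true
}
```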
diff --git a/proposals/12147-mlmd-removal/protos/artifacts.proto b/proposals/12147-mlmd-removal/protos/artifacts.proto index 120ed3cac4e..56176b93038 100644 --- a/proposals/12147-mlmd-removal/protos/artifacts.proto +++ b/proposals/12147-mlmd-removal/protos/artifacts.proto @@ -76,6 +76,32 @@ service ArtifactService { }; } + // Creates a new artifact. + rpc CreateArtifact(CreateArtifactRequest) returns (Artifact) { + option (google.api.http) = { + post: "/apis/v2beta1/artifacts" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + operation_id: "create_artifact" + summary: "Creates a new artifact." + tags: "ArtifactService" + }; + } + + rpc CreateArtifactsBulk(CreateArtifactsBulkRequest) returns (CreateArtifactsBulkResponse) { + option (google.api.http) = { + post: "/apis/v2beta1/artifacts:batchCreate" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + operation_id: "batch_create_artifacts" + summary: "Creates multiple artifacts in bulk." + tags: "ArtifactService" + }; + } + + // List ArtifactTasks. rpc ListArtifactTasks(ListArtifactTasksRequest) returns (ListArtifactTasksResponse) { option (google.api.http) = { get: "/apis/v2beta1/artifact_tasks" @@ -103,15 +129,15 @@ service ArtifactService { }; } - // Creates a new artifact. - rpc CreateArtifact(CreateArtifactRequest) returns (Artifact) { + // Creates multiple artifact-task relationships in bulk. + rpc CreateArtifactTasksBulk(CreateArtifactTasksBulkRequest) returns (CreateArtifactTasksBulkResponse) { option (google.api.http) = { - post: "/apis/v2beta1/artifacts" + post: "/apis/v2beta1/artifact_tasks:batchCreate" body: "*" }; option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - operation_id: "create_artifact" - summary: "Creates a new artifact." + operation_id: "batch_create_artifact_tasks" + summary: "Creates multiple artifact-task relationships in bulk." tags: "ArtifactService" }; } @@ -124,10 +150,32 @@ message CreateArtifactRequest { // An artifact is always created in the context of a // run. string run_id = 2; + // The Task that is associated with the creation of this artifact. string task_id = 3; - ArtifactTaskType type = 4; - string producer_task_name = 5; - string producer_key = 6; + + // The output parameter name of this Artifact within this task's component spec. + // For example: + // def preprocess(my_output: dsl.Output[dsl.Artifact]): + // ... + // here the producer_key == "my_output" + // Note that producer_task_name == task_name + string producer_key = 5; + + // If the producing task is in a parallelFor iteration + // this field designates the iteration index + optional int64 iteration_index = 6; + + IOType type = 7; +} + +message CreateArtifactsBulkRequest { + // Required. The list of artifacts to create. + repeated CreateArtifactRequest artifacts = 1; +} + +message CreateArtifactsBulkResponse { + // The list of created artifacts. + repeated Artifact artifacts = 1; } message GetArtifactRequest { @@ -180,7 +228,7 @@ message ListArtifactTasksRequest { repeated string artifact_ids = 3; // Optional. Only list artifact tasks that have artifacts of this type. - ArtifactTaskType type = 4; + IOType type = 4; string page_token = 5; int32 page_size = 6; @@ -200,11 +248,62 @@ message CreateArtifactTaskRequest { ArtifactTask artifact_task = 1; } +message CreateArtifactTasksBulkRequest { + // Required. The list of artifact-task relationships to create.
+ repeated ArtifactTask artifact_tasks = 1; +} + +message CreateArtifactTasksBulkResponse { + // The list of created artifact-task relationships. + repeated ArtifactTask artifact_tasks = 1; +} + // Describes the I/O relationship between -// this Artifact and Task -enum ArtifactTaskType { - INPUT = 0; - OUTPUT = 1; +// these Artifacts & Parameters and the Task. +// There are a couple of instances where +// input/outputs have special types such +// as in the case of LoopArguments or +// dsl.Collected outputs. +enum IOType { + // For validation + UNSPECIFIED = 0; + // This is used for inputs that are + // provided via default parameters in + // the component input definitions + COMPONENT_DEFAULT_INPUT = 1; + // The name seems convoluted, but this aligns with the + // SDK naming in TaskInputsSpec.kind.task_output_parameter + // and TaskInputsSpec.kind.task_output_artifact + TASK_OUTPUT_INPUT = 2; + COMPONENT_INPUT = 3; + RUNTIME_VALUE_INPUT = 4; + // Used for dsl.Collected + // Usage of this type indicates that all + // Artifacts within the IOArtifact.artifacts + // are inputs collected from sub-tasks with + // ITERATOR_OUTPUT outputs. + COLLECTED_INPUTS = 5; + // In a for loop task, introduced via ParallelFor, this type + // is used to indicate whether this resolved input belongs + // to a parameterIterator or artifactIterator. + // In such a case the "artifacts" field for IOArtifact.artifacts + // is the list of resolved items for this parallelFor. + ITERATOR_INPUT = 6; + // Raw Iterator inputs have no producer + ITERATOR_INPUT_RAW = 7; + // When an output is produced by a Runtime Iteration Task, + // this value is used to differentiate it from standard outputs + ITERATOR_OUTPUT = 8; + OUTPUT = 9; + ONE_OF_OUTPUT = 10; + TASK_FINAL_STATUS_OUTPUT = 11; +} + +message IOProducer { + string task_name = 1; + // Set when the source is an iteration Runtime + // task inside a ParallelFor + optional int64 iteration = 2; } message ArtifactTask { @@ -213,37 +312,9 @@ string artifact_id = 2; string run_id = 3; string task_id = 4; - ArtifactTaskType type = 5; - - // The task that produced this artifact - // For example in the case of a pipeline channel - // that is an output artifact you might have as - // input something like the following in the IR: - // taskOutputArtifact: - // outputArtifactKey: output_dataset - // producerTask: create-dataset - // These fields are used to track this lineage. - // - // For outputs, the producer task is the component name - // of the task that produced the artifact. - string producer_task_name = 6; - // The key is often the parameter name used - // as input/output on the component, but - // can also take on the value of other values. - // For example: - // * "param-#" when using parameters in a ParallelFor - // * "Output" when using Pythonic Artifacts - // - // For outputs, the key is the name of the parameter - // in the component spec (found in OutputDefinitions) - // used to output the artifact. - string producer_key = 7; - - // The parameter name for the input/output artifact - // This maybe the same as the Artifact name if the - // artifact name is not specified. It is used to - // resolve artifact pipeline channels.
- string artifact_key = 8; + IOType type = 5; + IOProducer producer = 6; + string key = 7; } // Not to be confused with RuntimeArtifact in pipelinespec @@ -294,4 +365,4 @@ message Artifact { google.protobuf.Timestamp created_at = 8; string namespace = 9; -} +} \ No newline at end of file diff --git a/proposals/12147-mlmd-removal/protos/runs.json b/proposals/12147-mlmd-removal/protos/runs.json index bf33f227bd4..91d9ecd83f7 100644 --- a/proposals/12147-mlmd-removal/protos/runs.json +++ b/proposals/12147-mlmd-removal/protos/runs.json @@ -110,19 +110,7 @@ "child_tasks": [ { "task_id": "pipeline-j9t66-382940577", - "name": "child_task_name", - "pods": [ - { - "name": "child_task_pod1", - "uid" : "some_uid_b", - "type": "EXECUTOR" - }, - { - "name": "child_task_pod2", - "uid" : "some_uid_b", - "type": "DRIVER" - } - ] + "name": "child_task_name" } ], "type": "LOOP", diff --git a/proposals/12147-mlmd-removal/protos/runs.proto b/proposals/12147-mlmd-removal/protos/runs.proto index f91d6282b30..0184ca2bcda 100644 --- a/proposals/12147-mlmd-removal/protos/runs.proto +++ b/proposals/12147-mlmd-removal/protos/runs.proto @@ -105,13 +105,14 @@ message PipelineTaskDetail { // Name of the corresponding pod assigned by the orchestration engine. // Also known as node_id. enum TaskPodType { - DRIVER = 0; - EXECUTOR = 1; + UNSPECIFIED = 0; + DRIVER = 1; + EXECUTOR = 2; } message TaskPod { string name = 1; string uid = 2; - string type = 3; + TaskPodType type = 3; } repeated TaskPod pods = 5; @@ -127,25 +128,63 @@ message PipelineTaskDetail { google.protobuf.Timestamp end_time = 9; // Runtime state of a Task - RuntimeState status = 10; + enum TaskState { + // Default value. This value is not used. + RUNTIME_STATE_UNSPECIFIED = 0; - // Custom status metadata, this can be used to provide - // additional status info for a given task during runtime - map<string, google.protobuf.Value> status_metadata = 11; + // Entity execution is in progress. + RUNNING = 1; + + // Entity completed successfully. + SUCCEEDED = 2; + + // Entity has been skipped. For example, due to caching. + SKIPPED = 3; + + // Entity execution has failed. + FAILED = 4; + + CACHED = 5; + } + TaskState state = 10; + + + message StatusMetadata { + // KFP Backend will populate this field with error messages + // if any are available on a Failed task. + string message = 1; + // Custom status metadata, this can be used to provide + // additional status info for a given task during runtime + // This is currently not utilized by KFP backend. + map<string, google.protobuf.Value> custom_properties = 2; + } + StatusMetadata status_metadata = 11; + + // Timestamped representation of a Task state with an optional error. + message TaskStatus { + google.protobuf.Timestamp update_time = 1; + TaskState state = 2; + google.rpc.Status error = 3; + } // A sequence of task statuses. This field keeps a record // of state transitions. - repeated RuntimeStatus state_history = 12; + repeated TaskStatus state_history = 12; enum TaskType { // Root task replaces Root Execution, it is the top ancestor task to all tasks in the pipeline run ROOT = 0; RUNTIME = 1; + // Condition Branch is the wrapper If block CONDITION_BRANCH = 2; + // Condition is an individual if branch (this feels counterintuitive but this is how it's named in the SDK IR) + // and we are consistent with the naming here.
CONDITION = 3; + // Task Group for Loop Iterations LOOP = 4; - LOOP_ITERATION = 5; - EXIT_HANDLER = 6; + EXIT_HANDLER = 5; + IMPORTER = 6; // Generic DAG task type for types like Nested Pipelines // where there is no declarative way to detect this within // a driver. @@ -154,10 +193,10 @@ TaskType type = 13; message TypeAttributes { - // Optional. Applies to type LOOP_ITERATION - int64 iteration_index = 1; + // Optional. Applies to RUNTIME tasks that are loop iterations + optional int64 iteration_index = 1; // Optional. Applies to type LOOP - int64 iteration_count = 2; + optional int64 iteration_count = 2; } TypeAttributes type_attributes = 14; @@ -168,63 +207,55 @@ // ID of the parent task if the task is within a component scope. // Empty if the task is at the root level. - string parent_task_id = 16; + optional string parent_task_id = 16; // A dependent task that requires this one to succeed. // Represented by either task_id or pod_name. + // TODO(HumairAK): Do we need this if we have parent_task_id? message ChildTask { // System-generated ID of a task. string task_id = 1; - string name = 2; - - // Name of the corresponding pod assigned by the orchestration engine. - // Also known as node_id. - repeated TaskPod pods = 3; } // Sequence of dependent tasks. repeated ChildTask child_tasks = 17; message InputOutputs { - - message IOProducer { - string task_name = 1; - // This would be the equivalent of output_parameter_key from the upstream task - // when it's a parameter input, or output_artifact_key when it is an Artifact. - string key = 2; } - message Parameter { - string value = 1; - // Optional, this is only included on Runtime Tasks when the parameter name is known. - optional string name = 2; - // Not all Parameters have task producers, - // For example they can also be Runtime Constants. - // Whereas in the case of a PipelineChannel, they - // do have a producer. - optional IOProducer producer = 3; - } + message IOParameter { + google.protobuf.Value value = 1; + IOType type = 2; + string parameter_key = 3; + // This field is optional because in the case of + // Input RuntimeValues, ComponentDefaultInputs, + // and Raw Iterator Input there are no producers. + optional IOProducer producer = 4; } + // Align structure with Executor Input message IOArtifact { - // Optional, this is only included on Runtime Tasks when the parameter name is known. - string parameter_name = 1; - - Artifact value = 2; - - // All IO artifacts have a producer, so the following - // fields are required. In the case of importer - // where the artifact is set to reimport = true - // the name & key are importer-[0-9]+ and "artifact" - IOProducer producer = 3; + repeated Artifact artifacts = 1; + IOType type = 2; + string artifact_key = 3; + IOProducer producer = 4; } - repeated Parameter parameters = 1; - // Output Only. To create Artifacts for a task are created - // via ArtifactTasks. + // For Loops, parameters are filled with the resolved + // parameterIterator.items + repeated IOParameter parameters = 1; + + // Output Only. To create Artifacts for a task use + // ArtifactTasks to link artifacts to tasks. repeated IOArtifact artifacts = 2; } - InputOutputs inputs = 18; InputOutputs outputs = 19; + + // The scope of this task within the + // pipeline spec. Each entry represents + // either a Dag Task or a Container task. + // Note that a Container task will + // always be the last entry in a scope_path.
+ repeated string scope_path = 20; } @@ -237,7 +268,6 @@ message Run { string pipeline_version_id = 19; repeated PipelineTaskDetail tasks = 20; - // Either remove or deprecate this RunDetails run_details = 15; } diff --git a/test_data/compiled-workflows/add_numbers.yaml b/test_data/compiled-workflows/add_numbers.yaml index 72219a4cb9e..3ce83118067 100644 --- a/test_data/compiled-workflows/add_numbers.yaml +++ b/test_data/compiled-workflows/add_numbers.yaml @@ -41,8 +41,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -69,6 +69,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -78,13 +91,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -106,6 +123,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -135,6 +160,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -145,6 +174,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -184,6 +216,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -208,8 +247,8 @@ spec: value: '{{workflow.parameters.implementations-a06e49d55601ed1e78432721afac56c959e05f0346dc650f749ccab33c0e425e}}' - name: task-name value: add-numbers - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: add-numbers-driver template: system-container-driver - arguments: @@ -224,7 +263,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -240,8 +279,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -252,8 +291,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - 
'{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -266,6 +305,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -275,6 +327,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -284,8 +340,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -294,9 +350,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -305,6 +361,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -319,8 +383,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/arguments-parameters.yaml b/test_data/compiled-workflows/arguments-parameters.yaml index 8a6ae6faa98..da41a66eec9 100644 --- a/test_data/compiled-workflows/arguments-parameters.yaml +++ b/test_data/compiled-workflows/arguments-parameters.yaml @@ -32,8 +32,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -60,6 +60,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -69,13 +82,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -97,6 +114,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -126,6 +151,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -136,6 +165,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: 
/gcs name: gcs-scratch - mountPath: /s3 @@ -175,6 +207,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -199,8 +238,8 @@ spec: value: '{{workflow.parameters.implementations-e3bf4dafebca73c53759f2310029cb3fc65ab6a05d870069f7c58096ff7bb483}}' - name: task-name value: echo - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: echo-driver template: system-container-driver - arguments: @@ -215,7 +254,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -231,8 +270,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -243,8 +282,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -257,6 +296,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -266,6 +318,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -275,8 +331,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -285,9 +341,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -296,6 +352,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -310,8 +374,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/arguments.pipeline.yaml b/test_data/compiled-workflows/arguments.pipeline.yaml index 8a6ae6faa98..da41a66eec9 100644 --- a/test_data/compiled-workflows/arguments.pipeline.yaml +++ b/test_data/compiled-workflows/arguments.pipeline.yaml @@ -32,8 +32,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - 
'{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -60,6 +60,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -69,13 +82,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -97,6 +114,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -126,6 +151,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -136,6 +165,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -175,6 +207,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -199,8 +238,8 @@ spec: value: '{{workflow.parameters.implementations-e3bf4dafebca73c53759f2310029cb3fc65ab6a05d870069f7c58096ff7bb483}}' - name: task-name value: echo - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: echo-driver template: system-container-driver - arguments: @@ -215,7 +254,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -231,8 +270,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -243,8 +282,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -257,6 +296,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -266,6 +318,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -275,8 +331,8 @@ spec: name: task - default: "" name: 
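
The recurring kfp-launcher-token volume in these hunks asks the kubelet for a short-lived ServiceAccount token, audience-bound to pipelines.kubeflow.org and mounted at /var/run/secrets/kfp/token. The following is a minimal Go sketch of how a driver or launcher could present that token to the KFP API server; the mount path and audience come from the manifests, while the Bearer-header convention and the endpoint URL are illustrative assumptions, not the actual KFP client code. The token is re-read on every request because the kubelet rotates projected tokens well before the requested 7200-second lifetime elapses.

// token_client.go — a minimal sketch, not the actual KFP client implementation.
package main

import (
	"fmt"
	"net/http"
	"os"
	"strings"
)

const tokenPath = "/var/run/secrets/kfp/token" // projected serviceAccountToken mount

// bearerTransport injects the projected token into every request. It re-reads
// the file each time because the kubelet rotates the token automatically.
type bearerTransport struct{ base http.RoundTripper }

func (t *bearerTransport) RoundTrip(req *http.Request) (*http.Response, error) {
	tok, err := os.ReadFile(tokenPath)
	if err != nil {
		return nil, fmt.Errorf("read projected token: %w", err)
	}
	clone := req.Clone(req.Context())
	clone.Header.Set("Authorization", "Bearer "+strings.TrimSpace(string(tok)))
	return t.base.RoundTrip(clone)
}

func main() {
	client := &http.Client{Transport: &bearerTransport{base: http.DefaultTransport}}
	// Hypothetical health endpoint; the real API server address is wired in elsewhere.
	resp, err := client.Get("http://ml-pipeline.kubeflow:8888/apis/v2beta1/healthz")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status)
}
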
task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -285,9 +341,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -296,6 +352,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -310,8 +374,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/artifact_cache.yaml b/test_data/compiled-workflows/artifact_cache.yaml index ae3297850df..0d11f1ae138 100644 --- a/test_data/compiled-workflows/artifact_cache.yaml +++ b/test_data/compiled-workflows/artifact_cache.yaml @@ -60,8 +60,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -88,6 +88,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -97,13 +110,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -125,6 +142,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -154,6 +179,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -164,6 +193,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -203,6 +235,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -227,8 +266,8 @@ spec: value: '{{workflow.parameters.implementations-8e952934307908424012b1f26f02dd613c9508616be5a191f53bbb92ea5ff518}}' - name: task-name value: core-comp - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: core-comp-driver template: 
system-container-driver - arguments: @@ -243,7 +282,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-core outputs: {} @@ -259,8 +298,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -271,8 +310,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -285,6 +324,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -294,6 +346,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -303,8 +359,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -313,9 +369,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -324,14 +380,22 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: parameters: - name: component value: '{{workflow.parameters.components-comp-core}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-core"},"taskInfo":{"name":"core"}}' - name: task-name @@ -340,8 +404,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.core-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.core-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.core-driver.outputs.parameters.condition}}' depends: core-driver.Succeeded @@ -349,7 +413,7 @@ spec: template: comp-core inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-mantle outputs: {} @@ -365,8 +429,8 @@ spec: value: '{{workflow.parameters.implementations-f7a5dac7766188990a50c9769a1a548cdf1cf99da8f89bbad299b3175f7e8141}}' - name: task-name value: crust-comp - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: mantle.Succeeded name: crust-comp-driver template: system-container-driver @@ -384,8 +448,8 @@ spec: parameters: - name: component value: 
'{{workflow.parameters.components-comp-mantle}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-mantle"},"taskInfo":{"name":"mantle"}}' - name: task-name @@ -394,8 +458,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.mantle-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.mantle-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.mantle-driver.outputs.parameters.condition}}' depends: mantle-driver.Succeeded @@ -403,7 +467,7 @@ spec: template: comp-mantle inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -421,8 +485,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/artifact_crust.yaml b/test_data/compiled-workflows/artifact_crust.yaml index 35c2bf4351c..97e92c96b8e 100644 --- a/test_data/compiled-workflows/artifact_crust.yaml +++ b/test_data/compiled-workflows/artifact_crust.yaml @@ -60,8 +60,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -88,6 +88,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -97,13 +110,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -125,6 +142,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -154,6 +179,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -164,6 +193,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -203,6 +235,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -227,8 +266,8 @@ spec: value: '{{workflow.parameters.implementations-8e952934307908424012b1f26f02dd613c9508616be5a191f53bbb92ea5ff518}}' - 
name: task-name value: core-comp - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: core-comp-driver template: system-container-driver - arguments: @@ -243,7 +282,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-core outputs: {} @@ -259,8 +298,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -271,8 +310,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -285,6 +324,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -294,6 +346,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -303,8 +359,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -313,9 +369,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -324,14 +380,22 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: parameters: - name: component value: '{{workflow.parameters.components-comp-core}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"cachingOptions":{},"componentRef":{"name":"comp-core"},"taskInfo":{"name":"core"}}' - name: task-name @@ -340,8 +404,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.core-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.core-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.core-driver.outputs.parameters.condition}}' depends: core-driver.Succeeded @@ -349,7 +413,7 @@ spec: template: comp-core inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-mantle outputs: {} @@ -365,8 +429,8 @@ spec: value: '{{workflow.parameters.implementations-f7a5dac7766188990a50c9769a1a548cdf1cf99da8f89bbad299b3175f7e8141}}' - name: task-name value: crust-comp - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: 
'{{inputs.parameters.parent-dag-task-id}}' depends: mantle.Succeeded name: crust-comp-driver template: system-container-driver @@ -384,8 +448,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-mantle}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"cachingOptions":{},"componentRef":{"name":"comp-mantle"},"taskInfo":{"name":"mantle"}}' - name: task-name @@ -394,8 +458,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.mantle-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.mantle-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.mantle-driver.outputs.parameters.condition}}' depends: mantle-driver.Succeeded @@ -403,7 +467,7 @@ spec: template: comp-mantle inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -421,8 +485,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/artifacts_complex.yaml b/test_data/compiled-workflows/artifacts_complex.yaml index 824b4699e7c..750c5fc0f85 100644 --- a/test_data/compiled-workflows/artifacts_complex.yaml +++ b/test_data/compiled-workflows/artifacts_complex.yaml @@ -86,8 +86,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -114,6 +114,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -123,13 +136,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -151,6 +168,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -180,6 +205,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -190,6 +219,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -229,6 +261,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - 
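
For debugging, the projected token's claims can be inspected without any Kubernetes client: a ServiceAccount token is a JWT, so its payload is base64url-encoded JSON. The sketch below is a decoder, not a verifier (it checks no signature); it shows where the audience (pipelines.kubeflow.org) and expirationSeconds (7200) from the volume definitions above surface at runtime. Field names follow the standard JWT claims.

// inspect_token.go — decodes (does not verify) the projected token's payload.
package main

import (
	"encoding/base64"
	"encoding/json"
	"fmt"
	"os"
	"strings"
	"time"
)

func main() {
	raw, err := os.ReadFile("/var/run/secrets/kfp/token")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	parts := strings.Split(strings.TrimSpace(string(raw)), ".")
	if len(parts) != 3 {
		fmt.Fprintln(os.Stderr, "not a JWT")
		os.Exit(1)
	}
	payload, err := base64.RawURLEncoding.DecodeString(parts[1])
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	var claims struct {
		Sub string   `json:"sub"` // system:serviceaccount:<namespace>:<name>
		Aud []string `json:"aud"` // should contain pipelines.kubeflow.org
		Exp int64    `json:"exp"` // at most 7200s after issuance, per the manifest
	}
	if err := json.Unmarshal(payload, &claims); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	fmt.Printf("subject:  %s\naudience: %v\nexpires:  %s\n",
		claims.Sub, claims.Aud, time.Unix(claims.Exp, 0).UTC())
}
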
emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -253,8 +292,8 @@ spec: value: '{{workflow.parameters.implementations-16f08981216475ab27b039fb96f85c515876ffd8fd48dc99c163ad67ab8791c2}}' - name: task-name value: add - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: add-driver template: system-container-driver - arguments: @@ -277,8 +316,8 @@ spec: value: '{{workflow.parameters.implementations-16f08981216475ab27b039fb96f85c515876ffd8fd48dc99c163ad67ab8791c2}}' - name: task-name value: add-2 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: add-2-driver template: system-container-driver - arguments: @@ -301,8 +340,8 @@ spec: value: '{{workflow.parameters.implementations-0f454994223bfba6e739848f3c0233b8b8af4c822d95fddc776ea487b22500f0}}' - name: task-name value: add-two-ints - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: add.Succeeded && add-2.Succeeded name: add-two-ints-driver template: system-container-driver @@ -318,7 +357,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-add-two-lists-of-datasets outputs: {} @@ -334,8 +373,8 @@ spec: value: '{{workflow.parameters.implementations-7d8f01a93fab448a938303bbb205e2cf7bfadf50bc37698ff2784aebf5363d47}}' - name: task-name value: double-2 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: double-2-driver template: system-container-driver - arguments: @@ -350,7 +389,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-5 outputs: {} @@ -366,8 +405,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -378,8 +417,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -392,6 +431,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -401,6 +453,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -410,8 +466,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -420,9 +476,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - 
path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -431,67 +487,49 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-5}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-5"},"inputs":{"parameters":{"pipelinechannel--loop-item-param-3":{"componentInputParameter":"pipelinechannel--loop-item-param-3"},"pipelinechannel--threshold":{"componentInputParameter":"pipelinechannel--threshold"}}},"taskInfo":{"name":"condition-5"},"triggerPolicy":{"condition":"int(inputs.parameter_values[''pipelinechannel--loop-item-param-3'']) \u003e= int(inputs.parameter_values[''pipelinechannel--threshold''])"}}' - name: task-name value: condition-5 + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: condition-5-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-5-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-5-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-5-driver.outputs.parameters.condition}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: condition-5-driver.Succeeded name: condition-5 template: comp-condition-5 when: '{{tasks.condition-5-driver.outputs.parameters.condition}} != false' inputs: parameters: - - name: parent-dag-id - metadata: {} - name: comp-for-loop-4 - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-4}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-4"},"inputs":{"parameters":{"pipelinechannel--threshold":{"componentInputParameter":"pipelinechannel--threshold"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[1, - 2, 3]"}},"taskInfo":{"name":"for-loop-4"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-4 - inputs: - parameters: - - name: parent-dag-id + - name: parent-dag-task-id - name: iteration-index metadata: {} - name: comp-for-loop-4-iteration + name: comp-for-loop-4 outputs: {} - dag: tasks: @@ -499,27 +537,29 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-4}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: 
'{"componentRef":{"name":"comp-for-loop-4"},"inputs":{"parameters":{"pipelinechannel--threshold":{"componentInputParameter":"pipelinechannel--threshold"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[1, 2, 3]"}},"taskInfo":{"name":"for-loop-4"}}' + - name: task-name + value: for-loop-4 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-4-iteration + template: comp-for-loop-4 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-4-for-loop-4-iterator outputs: {} @@ -535,8 +575,10 @@ spec: value: '{{workflow.parameters.implementations-7d8f01a93fab448a938303bbb205e2cf7bfadf50bc37698ff2784aebf5363d47}}' - name: task-name value: double - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: double-driver template: system-container-driver - arguments: @@ -546,51 +588,25 @@ spec: - default: "false" name: cached-decision value: '{{tasks.double-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: double-driver.Succeeded name: double template: system-container-executor - arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - name: for-loop-4 - template: comp-for-loop-4-for-loop-4-iterator - inputs: - parameters: - - name: parent-dag-id - metadata: {} - name: comp-for-loop-2 - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-2}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: iteration-index value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-2"},"inputs":{"parameters":{"pipelinechannel--threshold":{"componentInputParameter":"threshold"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, - 2, 3]"}},"taskInfo":{"name":"for-loop-2"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-2 + name: for-loop-4 + template: comp-for-loop-4-for-loop-4-iterator inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id - name: iteration-index metadata: {} - name: comp-for-loop-2-iteration + name: comp-for-loop-2 outputs: {} - dag: tasks: @@ -598,27 +614,29 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-2}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: 
'{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-for-loop-2"},"inputs":{"parameters":{"pipelinechannel--threshold":{"componentInputParameter":"threshold"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, 2, 3]"}},"taskInfo":{"name":"for-loop-2"}}' + - name: task-name + value: for-loop-2 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-2-iteration + template: comp-for-loop-2 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-2-for-loop-2-iterator outputs: {} @@ -628,8 +646,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-add-two-lists-of-datasets}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add-two-lists-of-datasets"},"dependentTasks":["for-loop-2"],"inputs":{"artifacts":{"in_datasets1":{"taskOutputArtifact":{"outputArtifactKey":"pipelinechannel--double-out_dataset","producerTask":"for-loop-2"}},"in_datasets2":{"taskOutputArtifact":{"outputArtifactKey":"pipelinechannel--double-2-out_dataset","producerTask":"for-loop-2"}}}},"taskInfo":{"name":"add-two-lists-of-datasets"}}' - name: task-name @@ -639,8 +657,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.add-two-lists-of-datasets-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.add-two-lists-of-datasets-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.add-two-lists-of-datasets-driver.outputs.parameters.condition}}' depends: add-two-lists-of-datasets-driver.Succeeded @@ -648,13 +666,13 @@ spec: template: comp-add-two-lists-of-datasets - arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: for-loop-2 template: comp-for-loop-2-for-loop-2-iterator inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -672,8 +690,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/artifacts_simple.yaml b/test_data/compiled-workflows/artifacts_simple.yaml index c63c37aa716..f55b8e7ea13 100644 --- a/test_data/compiled-workflows/artifacts_simple.yaml +++ b/test_data/compiled-workflows/artifacts_simple.yaml @@ -71,8 +71,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - 
'{{inputs.parameters.component}}' - --task @@ -99,6 +99,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -108,13 +121,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -136,6 +153,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -165,6 +190,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -175,6 +204,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -214,6 +246,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -238,8 +277,10 @@ spec: value: '{{workflow.parameters.implementations-0242e2764b7c7b68c6cabf3e21fd30e2eebe9df2d233c19bf4b4c103b3ff7560}}' - name: task-name value: double - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: double-driver template: system-container-driver - arguments: @@ -249,12 +290,15 @@ spec: - default: "false" name: cached-decision value: '{{tasks.double-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: double-driver.Succeeded name: double template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id + - name: iteration-index metadata: {} name: comp-for-loop-2 outputs: {} @@ -270,8 +314,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -282,8 +326,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -296,6 +340,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: 
ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -305,6 +362,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -314,8 +375,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -324,9 +385,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -335,64 +396,43 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-2}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, - 2, 3]"}},"taskInfo":{"name":"for-loop-2"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-2 - inputs: - parameters: - - name: parent-dag-id - - name: iteration-index - metadata: {} - name: comp-for-loop-2-iteration - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-2}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, 2, 3]"}},"taskInfo":{"name":"for-loop-2"}}' + - name: task-name + value: for-loop-2 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-2-iteration + template: comp-for-loop-2 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-2-for-loop-2-iterator outputs: {} @@ -408,8 +448,8 @@ spec: value: '{{workflow.parameters.implementations-fe3df0f6fa90face22ffbe395cfab1dc3052f996d7af9c457c592a156a228e9c}}' - name: task-name value: add - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: for-loop-2.Succeeded name: add-driver template: system-container-driver @@ -433,8 +473,8 @@ spec: 
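
Throughout these workflows the driver's identity plumbing changes shape: --dag_execution_id and --execution_id_path (MLMD execution IDs) become --parent_task_id and --parent_task_id_path (KFP task IDs), the root default switches from "0" to "", and the pod's own identity now arrives via the Downward API env vars added above. The sketch below mirrors that flag surface; the flag names and output path match the YAML, while the body and the placeholder task ID are hypothetical stand-ins for the real driver logic.

// driver_flags.go — illustrative sketch of the renamed driver CLI surface.
package main

import (
	"flag"
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	parentTaskID := flag.String("parent_task_id", "", "task ID of the parent DAG ('' for root)")
	taskIDPath := flag.String("parent_task_id_path", "/tmp/outputs/task-id", "file to write this task's ID to")
	flag.Parse()

	// Pod identity now arrives via the Downward API (see the env: additions).
	fmt.Printf("pod=%s uid=%s ns=%s parent=%q\n",
		os.Getenv("KFP_POD_NAME"), os.Getenv("KFP_POD_UID"), os.Getenv("NAMESPACE"), *parentTaskID)

	myTaskID := "task-1234" // placeholder: the real ID would come from the KFP API server
	if err := os.MkdirAll(filepath.Dir(*taskIDPath), 0o755); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	// Argo surfaces this file as the parent-dag-task-id-path output parameter,
	// which downstream tasks consume as their parent-dag-task-id input.
	if err := os.WriteFile(*taskIDPath, []byte(myTaskID), 0o644); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}
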
value: '{{workflow.parameters.implementations-10160d6ed5205864573d7fc5ab62e00d8b050cf985d80b0bbaae43ba7d32c6c4}}' - name: task-name value: add-container - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: for-loop-2.Succeeded name: add-container-driver template: system-container-driver @@ -450,13 +490,13 @@ spec: template: system-container-executor - arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: for-loop-2 template: comp-for-loop-2-for-loop-2-iterator inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -474,8 +514,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/collected_artifacts.yaml b/test_data/compiled-workflows/collected_artifacts.yaml index dd8a3f82615..1798884d2c3 100644 --- a/test_data/compiled-workflows/collected_artifacts.yaml +++ b/test_data/compiled-workflows/collected_artifacts.yaml @@ -152,8 +152,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -180,6 +180,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -189,13 +202,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -217,6 +234,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -246,6 +271,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -256,6 +285,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -295,6 +327,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -319,8 +358,8 @@ spec: value: '{{workflow.parameters.implementations-acfe6b8e77458ff7c9eb4a10a23639b58364c808c1c39d8c4c1235861d099dbf}}' - name: task-name value: create-dataset - - name: parent-dag-id - 
value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: create-dataset-driver template: system-container-driver - arguments: @@ -335,7 +374,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-single-node-dag outputs: {} @@ -351,8 +390,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -363,8 +402,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -377,6 +416,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -386,6 +438,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -395,8 +451,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -405,9 +461,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -416,6 +472,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -428,8 +492,10 @@ spec: value: '{{workflow.parameters.implementations-f1c1eb97565294e29b1a6409078e07ca9d3302c9ccdacd7751eaa032a7d99f43}}' - name: task-name value: read-single-dataset-generate-model - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: single-node-dag.Succeeded name: read-single-dataset-generate-model-driver template: system-container-driver @@ -440,6 +506,8 @@ spec: - default: "false" name: cached-decision value: '{{tasks.read-single-dataset-generate-model-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: read-single-dataset-generate-model-driver.Succeeded name: read-single-dataset-generate-model template: system-container-executor @@ -447,58 +515,33 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-single-node-dag}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: 
'{"cachingOptions":{},"componentRef":{"name":"comp-single-node-dag"},"inputs":{"parameters":{"char":{"componentInputParameter":"pipelinechannel--split-chars-Output-loop-item"}}},"taskInfo":{"name":"single-node-dag"}}' - name: task-name value: single-node-dag + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: single-node-dag-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.single-node-dag-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.single-node-dag-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.single-node-dag-driver.outputs.parameters.condition}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: single-node-dag-driver.Succeeded name: single-node-dag template: comp-single-node-dag inputs: parameters: - - name: parent-dag-id - metadata: {} - name: comp-for-loop-2 - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-2}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-2"},"inputs":{"parameters":{"pipelinechannel--split-chars-Output":{"componentInputParameter":"pipelinechannel--split-chars-Output"},"pipelinechannel--split-ids-Output-loop-item":{"componentInputParameter":"pipelinechannel--split-ids-Output-loop-item"}}},"parameterIterator":{"itemInput":"pipelinechannel--split-chars-Output-loop-item","items":{"inputParameter":"pipelinechannel--split-chars-Output"}},"taskInfo":{"name":"for-loop-2"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-2 - inputs: - parameters: - - name: parent-dag-id + - name: parent-dag-task-id - name: iteration-index metadata: {} - name: comp-for-loop-2-iteration + name: comp-for-loop-2 outputs: {} - dag: tasks: @@ -506,26 +549,28 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-2}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-for-loop-2"},"inputs":{"parameters":{"pipelinechannel--split-chars-Output":{"componentInputParameter":"pipelinechannel--split-chars-Output"},"pipelinechannel--split-ids-Output-loop-item":{"componentInputParameter":"pipelinechannel--split-ids-Output-loop-item"}}},"parameterIterator":{"itemInput":"pipelinechannel--split-chars-Output-loop-item","items":{"inputParameter":"pipelinechannel--split-chars-Output"}},"taskInfo":{"name":"for-loop-2"}}' + - name: task-name + value: for-loop-2 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: 
comp-for-loop-2-iteration + template: comp-for-loop-2 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-2-for-loop-2-iterator outputs: {} @@ -541,8 +586,10 @@ spec: value: '{{workflow.parameters.implementations-ec1f774a9791d5bf8f1f6f2d765e693d12aa053c0068a46d18df151f5a9a8545}}' - name: task-name value: create-file - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: create-file-driver template: system-container-driver - arguments: @@ -552,13 +599,17 @@ spec: - default: "false" name: cached-decision value: '{{tasks.create-file-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: create-file-driver.Succeeded name: create-file template: system-container-executor - arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: for-loop-2 template: comp-for-loop-2-for-loop-2-iterator - arguments: @@ -571,8 +622,10 @@ spec: value: '{{workflow.parameters.implementations-d4b6b9e1cff17ea3ca93d63109e65319f3c20876c7867400587cb270fceed458}}' - name: task-name value: read-datasets - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: for-loop-2.Succeeded name: read-datasets-driver template: system-container-driver @@ -583,6 +636,8 @@ spec: - default: "false" name: cached-decision value: '{{tasks.read-datasets-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: read-datasets-driver.Succeeded name: read-datasets template: system-container-executor @@ -596,8 +651,10 @@ spec: value: '{{workflow.parameters.implementations-36399431fc5940f221467d3cd76e824ebecf7385e9634fda1cf2b1ded43ffbe3}}' - name: task-name value: read-models - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: for-loop-2.Succeeded name: read-models-driver template: system-container-driver @@ -608,6 +665,8 @@ spec: - default: "false" name: cached-decision value: '{{tasks.read-models-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: read-models-driver.Succeeded name: read-models template: system-container-executor @@ -621,8 +680,10 @@ spec: value: '{{workflow.parameters.implementations-cb38e5430ba66f0db667b439264d51cf1757fcf432fc9ea4fe8e38fbd468cd1a}}' - name: task-name value: read-single-file - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: create-file.Succeeded name: read-single-file-driver template: system-container-driver @@ -633,44 +694,17 @@ spec: - default: 
"false" name: cached-decision value: '{{tasks.read-single-file-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: read-single-file-driver.Succeeded name: read-single-file template: system-container-executor inputs: parameters: - - name: parent-dag-id - metadata: {} - name: comp-for-loop-1 - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-1}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-1"},"dependentTasks":["split-chars","split-ids"],"inputs":{"parameters":{"pipelinechannel--split-chars-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"split-chars"}},"pipelinechannel--split-ids-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"split-ids"}}}},"parameterIterator":{"itemInput":"pipelinechannel--split-ids-Output-loop-item","items":{"inputParameter":"pipelinechannel--split-ids-Output"}},"taskInfo":{"name":"for-loop-1"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-1 - inputs: - parameters: - - name: parent-dag-id + - name: parent-dag-task-id - name: iteration-index metadata: {} - name: comp-for-loop-1-iteration + name: comp-for-loop-1 outputs: {} - dag: tasks: @@ -678,26 +712,28 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-1}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-for-loop-1"},"dependentTasks":["split-chars","split-ids"],"inputs":{"parameters":{"pipelinechannel--split-chars-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"split-chars"}},"pipelinechannel--split-ids-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"split-ids"}}}},"parameterIterator":{"itemInput":"pipelinechannel--split-ids-Output-loop-item","items":{"inputParameter":"pipelinechannel--split-ids-Output"}},"taskInfo":{"name":"for-loop-1"}}' + - name: task-name + value: for-loop-1 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-1-iteration + template: comp-for-loop-1 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-1-for-loop-1-iterator outputs: {} @@ -705,8 +741,8 @@ spec: tasks: - arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: 
split-chars.Succeeded && split-ids.Succeeded name: for-loop-1 template: comp-for-loop-1-for-loop-1-iterator @@ -720,8 +756,8 @@ spec: value: '{{workflow.parameters.implementations-be1feef4ec8e00ce2e55f9604db9c6b0cca05dcfeaf633230ca6bf0723babac8}}' - name: task-name value: split-chars - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: split-chars-driver template: system-container-driver - arguments: @@ -744,8 +780,8 @@ spec: value: '{{workflow.parameters.implementations-e8b2565c05c3de8bb0b7d77062539f34be122f1b8169f7198d40d58499d26d4e}}' - name: task-name value: split-ids - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: split-ids-driver template: system-container-driver - arguments: @@ -760,7 +796,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-collecting-artifacts outputs: {} @@ -770,8 +806,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-collecting-artifacts}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"cachingOptions":{},"componentRef":{"name":"comp-collecting-artifacts"},"inputs":{"parameters":{"model_chars":{"runtimeValue":{"constant":"x,y,z"}},"model_ids":{"runtimeValue":{"constant":"s1,s2,s3"}}}},"taskInfo":{"name":"collecting-artifacts"}}' - name: task-name @@ -780,8 +816,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.collecting-artifacts-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.collecting-artifacts-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.collecting-artifacts-driver.outputs.parameters.condition}}' depends: collecting-artifacts-driver.Succeeded @@ -797,8 +833,8 @@ spec: value: '{{workflow.parameters.implementations-36399431fc5940f221467d3cd76e824ebecf7385e9634fda1cf2b1ded43ffbe3}}' - name: task-name value: read-models - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: collecting-artifacts.Succeeded name: read-models-driver template: system-container-driver @@ -814,7 +850,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -832,8 +868,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/collected_parameters.yaml b/test_data/compiled-workflows/collected_parameters.yaml index ad98c7df191..46510f93470 100644 --- a/test_data/compiled-workflows/collected_parameters.yaml +++ b/test_data/compiled-workflows/collected_parameters.yaml @@ -87,8 +87,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - 
'{{inputs.parameters.component}}' - --task @@ -115,6 +115,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -124,13 +137,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -152,6 +169,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -181,6 +206,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -191,6 +220,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -230,6 +262,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -254,8 +293,10 @@ spec: value: '{{workflow.parameters.implementations-7afbc0576341694fb223cf141107254be822a3b82ec9ee880741d0440246a0c0}}' - name: task-name value: consume-single-id - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: prepend-id.Succeeded name: consume-single-id-driver template: system-container-driver @@ -266,6 +307,8 @@ spec: - default: "false" name: cached-decision value: '{{tasks.consume-single-id-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: consume-single-id-driver.Succeeded name: consume-single-id template: system-container-executor @@ -279,8 +322,10 @@ spec: value: '{{workflow.parameters.implementations-579756e400e5e7eb1e9d6fabfb7a8e87a8971dbe30dcfb3fee4b6bf02148944a}}' - name: task-name value: prepend-id - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: prepend-id-driver template: system-container-driver - arguments: @@ -290,12 +335,15 @@ spec: - default: "false" name: cached-decision value: '{{tasks.prepend-id-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: prepend-id-driver.Succeeded name: prepend-id template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id + - name: iteration-index metadata: {} name: comp-for-loop-1 outputs: {} @@ -311,8 +359,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - 
--dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -323,8 +371,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -337,6 +385,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -346,6 +407,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -355,8 +420,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -365,9 +430,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -376,62 +441,42 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-1}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-1"},"dependentTasks":["split-ids"],"inputs":{"parameters":{"pipelinechannel--split-ids-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"split-ids"}}}},"parameterIterator":{"itemInput":"pipelinechannel--split-ids-Output-loop-item","items":{"inputParameter":"pipelinechannel--split-ids-Output"}},"taskInfo":{"name":"for-loop-1"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-1 - inputs: - parameters: - - name: parent-dag-id - - name: iteration-index - metadata: {} - name: comp-for-loop-1-iteration - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-1}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: 
'{"componentRef":{"name":"comp-for-loop-1"},"dependentTasks":["split-ids"],"inputs":{"parameters":{"pipelinechannel--split-ids-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"split-ids"}}}},"parameterIterator":{"itemInput":"pipelinechannel--split-ids-Output-loop-item","items":{"inputParameter":"pipelinechannel--split-ids-Output"}},"taskInfo":{"name":"for-loop-1"}}' + - name: task-name + value: for-loop-1 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-1-iteration + template: comp-for-loop-1 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-1-for-loop-1-iterator outputs: {} @@ -447,8 +492,8 @@ spec: value: '{{workflow.parameters.implementations-690594f98b930e924d092cdddb1c8379f51c5ea407e8ff519feae9d7947be1c2}}' - name: task-name value: consume-ids - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: for-loop-1.Succeeded name: consume-ids-driver template: system-container-driver @@ -464,8 +509,8 @@ spec: template: system-container-executor - arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: split-ids.Succeeded name: for-loop-1 template: comp-for-loop-1-for-loop-1-iterator @@ -479,8 +524,8 @@ spec: value: '{{workflow.parameters.implementations-675759a85fbbe43a31184fa3998f5ad3d5231281f0a7b9bea7eae605a3f8f570}}' - name: task-name value: split-ids - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: split-ids-driver template: system-container-driver - arguments: @@ -495,7 +540,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-collecting-parameters outputs: {} @@ -505,8 +550,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-collecting-parameters}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"cachingOptions":{},"componentRef":{"name":"comp-collecting-parameters"},"inputs":{"parameters":{"model_ids":{"runtimeValue":{"constant":"s1,s2,s3"}}}},"taskInfo":{"name":"collecting-parameters"}}' - name: task-name @@ -515,8 +560,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.collecting-parameters-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.collecting-parameters-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.collecting-parameters-driver.outputs.parameters.condition}}' depends: collecting-parameters-driver.Succeeded @@ -532,8 +577,8 @@ spec: value: 
'{{workflow.parameters.implementations-690594f98b930e924d092cdddb1c8379f51c5ea407e8ff519feae9d7947be1c2}}' - name: task-name value: consume-ids - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: collecting-parameters.Succeeded name: consume-ids-driver template: system-container-driver @@ -549,7 +594,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -567,8 +612,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/component_with_metadata_fields.yaml b/test_data/compiled-workflows/component_with_metadata_fields.yaml index 958191d7308..1e0fc2493da 100644 --- a/test_data/compiled-workflows/component_with_metadata_fields.yaml +++ b/test_data/compiled-workflows/component_with_metadata_fields.yaml @@ -56,8 +56,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -84,6 +84,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -93,13 +106,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -121,6 +138,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -150,6 +175,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -160,6 +189,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -199,6 +231,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -223,8 +262,8 @@ spec: value: '{{workflow.parameters.implementations-e64230eee42e89b7bed44c01e0e5aadf92c2b123bf1ae03372c1e07eeadee7ff}}' - name: task-name value: dataset-joiner - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: dataset-joiner-driver template: system-container-driver - arguments: 
@@ -239,7 +278,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -255,8 +294,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -267,8 +306,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -281,6 +320,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -290,6 +342,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -299,8 +355,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -309,9 +365,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -320,6 +376,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -334,8 +398,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/component_with_optional_inputs.yaml b/test_data/compiled-workflows/component_with_optional_inputs.yaml index 2fb5d515521..102483f568d 100644 --- a/test_data/compiled-workflows/component_with_optional_inputs.yaml +++ b/test_data/compiled-workflows/component_with_optional_inputs.yaml @@ -45,8 +45,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -73,6 +73,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -82,13 +95,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: 
container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -110,6 +127,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -139,6 +164,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -149,6 +178,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -188,6 +220,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -212,8 +251,8 @@ spec: value: '{{workflow.parameters.implementations-8a37febc0bf19dffb0c33aa550ff8996f9e410b3f2b4c7f8f527b0c40618bb22}}' - name: task-name value: component-op - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: component-op-driver template: system-container-driver - arguments: @@ -228,7 +267,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -244,8 +283,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -256,8 +295,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -270,6 +309,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -279,6 +331,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -288,8 +344,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -298,9 +354,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -309,6 +365,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token 
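# --- Illustrative sketch; not part of the compiled test workflows above or below ---
# The hunks in these test_data/compiled-workflows diffs repeatedly add the same
# two pieces to the driver and launcher containers: downward-API env vars
# (KFP_POD_NAME, KFP_POD_UID, NAMESPACE) and a projected service-account-token
# volume mounted read-only at /var/run/secrets/kfp. The standalone pod below
# shows that pattern in one place for readability. The audience, expiry,
# mount path, and image are the values used throughout this diff; the pod
# name and the surrounding minimal pod spec are hypothetical, for illustration only.
apiVersion: v1
kind: Pod
metadata:
  name: kfp-token-demo   # hypothetical name, illustration only
spec:
  containers:
    - name: driver
      image: ghcr.io/kubeflow/kfp-driver:latest
      env:
        - name: KFP_POD_NAME
          valueFrom:
            fieldRef:
              fieldPath: metadata.name      # pod's own name via the downward API
        - name: KFP_POD_UID
          valueFrom:
            fieldRef:
              fieldPath: metadata.uid
        - name: NAMESPACE
          valueFrom:
            fieldRef:
              fieldPath: metadata.namespace
      volumeMounts:
        - mountPath: /var/run/secrets/kfp   # token readable at /var/run/secrets/kfp/token
          name: kfp-launcher-token
          readOnly: true
  volumes:
    - name: kfp-launcher-token
      projected:
        sources:
          - serviceAccountToken:
              audience: pipelines.kubeflow.org   # audience-bound token, as in the hunks above
              expirationSeconds: 7200            # kubelet rotates the token before expiry
              path: token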
- dag: tasks: - arguments: @@ -323,8 +387,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/component_with_pip_index_urls.yaml b/test_data/compiled-workflows/component_with_pip_index_urls.yaml index dcacdb44a4f..1574813b1d4 100644 --- a/test_data/compiled-workflows/component_with_pip_index_urls.yaml +++ b/test_data/compiled-workflows/component_with_pip_index_urls.yaml @@ -43,8 +43,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -71,6 +71,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -80,13 +93,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -108,6 +125,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -137,6 +162,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -147,6 +176,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -186,6 +218,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -210,8 +249,8 @@ spec: value: '{{workflow.parameters.implementations-47cfdcdf2aba86202a9b65c5f3a504c58eeb1fbd3e74c26e64e10f9fd0f3b8b9}}' - name: task-name value: component-op - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: component-op-driver template: system-container-driver - arguments: @@ -226,7 +265,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -242,8 +281,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -254,8 +293,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - 
'{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -268,6 +307,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -277,6 +329,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -286,8 +342,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -296,9 +352,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -307,6 +363,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -321,8 +385,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/component_with_pip_install.yaml b/test_data/compiled-workflows/component_with_pip_install.yaml index 2ac9f4910e6..2d763f3dc4a 100644 --- a/test_data/compiled-workflows/component_with_pip_install.yaml +++ b/test_data/compiled-workflows/component_with_pip_install.yaml @@ -43,8 +43,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -71,6 +71,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -80,13 +93,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -108,6 +125,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -137,6 +162,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: 
metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -147,6 +176,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -186,6 +218,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -210,8 +249,8 @@ spec: value: '{{workflow.parameters.implementations-76f86a3dc9bc2983c5f9c392d26e715eac83f100d1612e8841cab55696a13ec4}}' - name: task-name value: component-with-pip-install - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: component-with-pip-install-driver template: system-container-driver - arguments: @@ -226,7 +265,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -242,8 +281,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -254,8 +293,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -268,6 +307,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -277,6 +329,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -286,8 +342,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -296,9 +352,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -307,6 +363,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -321,8 +385,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/component_with_pip_install_in_venv.yaml 
b/test_data/compiled-workflows/component_with_pip_install_in_venv.yaml index 16c1d98b3a8..573eda1b08b 100644 --- a/test_data/compiled-workflows/component_with_pip_install_in_venv.yaml +++ b/test_data/compiled-workflows/component_with_pip_install_in_venv.yaml @@ -44,8 +44,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -72,6 +72,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -81,13 +94,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -109,6 +126,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -138,6 +163,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -148,6 +177,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -187,6 +219,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -211,8 +250,8 @@ spec: value: '{{workflow.parameters.implementations-153629da06fc36cdcd8f3c6e00a4b975ee79bcb08aac69fd0c9e23e727c0e838}}' - name: task-name value: component-with-pip-install - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: component-with-pip-install-driver template: system-container-driver - arguments: @@ -227,7 +266,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -243,8 +282,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -255,8 +294,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -269,6 +308,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: 
metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -278,6 +330,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -287,8 +343,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -297,9 +353,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -308,6 +364,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -322,8 +386,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/components_with_optional_artifacts.yaml b/test_data/compiled-workflows/components_with_optional_artifacts.yaml index 546b32b3897..d3f59d73b60 100644 --- a/test_data/compiled-workflows/components_with_optional_artifacts.yaml +++ b/test_data/compiled-workflows/components_with_optional_artifacts.yaml @@ -57,8 +57,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -85,6 +85,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -94,13 +107,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -122,6 +139,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -151,6 +176,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -161,6 +190,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -200,6 +232,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: 
kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -224,8 +263,8 @@ spec: value: '{{workflow.parameters.implementations-538891c59305f39fa7d0dc686fc88b88affbb35a10fd4ada0fe6e7b5e9136e97}}' - name: task-name value: python-artifact-printer - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: python-artifact-printer-driver template: system-container-driver - arguments: @@ -240,7 +279,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-inner-pipeline outputs: {} @@ -256,8 +295,8 @@ spec: value: '{{workflow.parameters.implementations-538891c59305f39fa7d0dc686fc88b88affbb35a10fd4ada0fe6e7b5e9136e97}}' - name: task-name value: python-artifact-printer - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: python-artifact-printer-driver template: system-container-driver - arguments: @@ -272,7 +311,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-inner-pipeline-2 outputs: {} @@ -280,26 +319,20 @@ spec: args: - --executor_type - importer - - --task_spec - - '{{inputs.parameters.task}}' - - --component_spec - - '{{inputs.parameters.component}}' + - --task_name + - '{{inputs.parameters.task-name}}' - --importer_spec - '{{inputs.parameters.importer}}' - --pipeline_name - optional-artifact-pipeline - --run_id - '{{workflow.uid}}' - - --parent_dag_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --pod_name - $(KFP_POD_NAME) - --pod_uid - $(KFP_POD_UID) - - --mlmd_server_address - - $(METADATA_GRPC_SERVICE_HOST) - - --mlmd_server_port - - $(METADATA_GRPC_SERVICE_PORT) command: - launcher-v2 env: @@ -311,6 +344,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -324,15 +361,26 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - - name: task - - name: component + - name: task-name - name: importer - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: system-importer outputs: {} + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - container: args: - --type @@ -345,8 +393,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -357,8 +405,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -371,6 +419,19 @@ spec: - "" command: - driver + env: + - 
name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -380,6 +441,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -389,8 +454,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -399,9 +464,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -410,6 +475,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -422,8 +495,8 @@ spec: value: '{{workflow.parameters.implementations-950ab2ff916de191d4701d45acbc5dce7cd1edca6fa6b0226245176343e4af97}}' - name: task-name value: custom-artifact-printer - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: custom-artifact-printer-driver template: system-container-driver - arguments: @@ -446,8 +519,8 @@ spec: value: '{{workflow.parameters.implementations-950ab2ff916de191d4701d45acbc5dce7cd1edca6fa6b0226245176343e4af97}}' - name: task-name value: custom-artifact-printer-2 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: custom-artifact-printer-2-driver template: system-container-driver - arguments: @@ -462,22 +535,20 @@ spec: template: system-container-executor - arguments: parameters: - - name: task - value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-importer"},"inputs":{"parameters":{"uri":{"runtimeValue":{"constant":"gs://ml-pipeline-playground/shakespeare1.txt"}}}},"taskInfo":{"name":"importer"}}' - - name: component - value: '{{workflow.parameters.components-comp-importer}}' + - name: task-name + value: importer - name: importer value: '{{workflow.parameters.implementations-comp-importer}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: importer template: system-importer - arguments: parameters: - name: component value: '{{workflow.parameters.components-comp-inner-pipeline}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-inner-pipeline"},"dependentTasks":["importer"],"inputs":{"artifacts":{"dataset":{"taskOutputArtifact":{"outputArtifactKey":"artifact","producerTask":"importer"}}}},"taskInfo":{"name":"inner-pipeline"}}' - name: task-name @@ -487,8 +558,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.inner-pipeline-driver.outputs.parameters.execution-id}}' + - name: 
parent-dag-task-id + value: '{{tasks.inner-pipeline-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.inner-pipeline-driver.outputs.parameters.condition}}' depends: inner-pipeline-driver.Succeeded @@ -498,8 +569,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-inner-pipeline-2}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-inner-pipeline-2"},"taskInfo":{"name":"inner-pipeline-2"}}' - name: task-name @@ -508,8 +579,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.inner-pipeline-2-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.inner-pipeline-2-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.inner-pipeline-2-driver.outputs.parameters.condition}}' depends: inner-pipeline-2-driver.Succeeded @@ -517,7 +588,7 @@ spec: template: comp-inner-pipeline-2 inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -535,8 +606,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/concat_message.yaml b/test_data/compiled-workflows/concat_message.yaml index 5f78700b94a..a9953af1d27 100644 --- a/test_data/compiled-workflows/concat_message.yaml +++ b/test_data/compiled-workflows/concat_message.yaml @@ -42,8 +42,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -70,6 +70,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -79,13 +92,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -107,6 +124,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -136,6 +161,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -146,6 +175,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -185,6 +217,13 @@ spec: volumes: - 
emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -209,8 +248,8 @@ spec: value: '{{workflow.parameters.implementations-5738fa6a8034d33cd840d7fed5885fd1870a2c63f8e79df3adc50a53e949c021}}' - name: task-name value: concat-message - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: concat-message-driver template: system-container-driver - arguments: @@ -225,7 +264,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -241,8 +280,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -253,8 +292,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -267,6 +306,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -276,6 +328,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -285,8 +341,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -295,9 +351,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -306,6 +362,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -320,8 +384,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/conditional_producer_and_consumers.yaml b/test_data/compiled-workflows/conditional_producer_and_consumers.yaml index de6da4fcd9a..f827b5e510b 100644 --- a/test_data/compiled-workflows/conditional_producer_and_consumers.yaml +++ b/test_data/compiled-workflows/conditional_producer_and_consumers.yaml @@ -63,8 +63,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - 
'{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -91,6 +91,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -100,13 +113,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -128,6 +145,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -157,6 +182,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -167,6 +196,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -206,6 +238,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -230,8 +269,8 @@ spec: value: '{{workflow.parameters.implementations-db8097682cf93da6b46f7ade69b360e7b202276c350b83e4bbf5da6625c7f8c3}}' - name: task-name value: add - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: add-driver template: system-container-driver - arguments: @@ -246,7 +285,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-4 outputs: {} @@ -262,8 +301,8 @@ spec: value: '{{workflow.parameters.implementations-f79f58ed4b630aadad448078027429b1d8053e45ed52bed5961317a8e3ebf5e3}}' - name: task-name value: double - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: double-driver template: system-container-driver - arguments: @@ -278,7 +317,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-3 outputs: {} @@ -294,8 +333,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -306,8 +345,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -320,6 
+359,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -329,6 +381,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -338,8 +394,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -348,9 +404,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -359,67 +415,49 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-3}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-3"},"inputs":{"parameters":{"pipelinechannel--loop-item-param-1":{"componentInputParameter":"pipelinechannel--loop-item-param-1"},"pipelinechannel--threshold":{"componentInputParameter":"pipelinechannel--threshold"}}},"taskInfo":{"name":"condition-3"},"triggerPolicy":{"condition":"int(inputs.parameter_values[''pipelinechannel--loop-item-param-1'']) \u003e= int(inputs.parameter_values[''pipelinechannel--threshold''])"}}' - name: task-name value: condition-3 + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: condition-3-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-3-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-3-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-3-driver.outputs.parameters.condition}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: condition-3-driver.Succeeded name: condition-3 template: comp-condition-3 when: '{{tasks.condition-3-driver.outputs.parameters.condition}} != false' inputs: parameters: - - name: parent-dag-id - metadata: {} - name: comp-for-loop-2 - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-2}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-2"},"inputs":{"parameters":{"pipelinechannel--threshold":{"componentInputParameter":"threshold"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, - 2, 3]"}},"taskInfo":{"name":"for-loop-2"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: 
'{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-2 - inputs: - parameters: - - name: parent-dag-id + - name: parent-dag-task-id - name: iteration-index metadata: {} - name: comp-for-loop-2-iteration + name: comp-for-loop-2 outputs: {} - dag: tasks: @@ -427,27 +465,29 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-2}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-for-loop-2"},"inputs":{"parameters":{"pipelinechannel--threshold":{"componentInputParameter":"threshold"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, 2, 3]"}},"taskInfo":{"name":"for-loop-2"}}' + - name: task-name + value: for-loop-2 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-2-iteration + template: comp-for-loop-2 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-2-for-loop-2-iterator outputs: {} @@ -457,8 +497,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-4}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-4"},"dependentTasks":["for-loop-2"],"inputs":{"parameters":{"pipelinechannel--for-loop-2-pipelinechannel--double-Output":{"taskOutputParameter":{"outputParameterKey":"pipelinechannel--double-Output","producerTask":"for-loop-2"}},"pipelinechannel--threshold":{"componentInputParameter":"threshold"}}},"taskInfo":{"name":"condition-4"},"triggerPolicy":{"condition":"int(inputs.parameter_values[''pipelinechannel--threshold'']) == 2"}}' @@ -469,8 +509,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-4-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-4-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-4-driver.outputs.parameters.condition}}' depends: condition-4-driver.Succeeded @@ -479,13 +519,13 @@ spec: when: '{{tasks.condition-4-driver.outputs.parameters.condition}} != false' - arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: for-loop-2 template: comp-for-loop-2-for-loop-2-iterator inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -503,8 +543,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + 
value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/container_component_with_no_inputs.yaml b/test_data/compiled-workflows/container_component_with_no_inputs.yaml index 988c29f1c95..1686dca9a27 100644 --- a/test_data/compiled-workflows/container_component_with_no_inputs.yaml +++ b/test_data/compiled-workflows/container_component_with_no_inputs.yaml @@ -32,8 +32,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -60,6 +60,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -69,13 +82,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -97,6 +114,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -126,6 +151,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -136,6 +165,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -175,6 +207,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -199,8 +238,8 @@ spec: value: '{{workflow.parameters.implementations-38c5a29d4ca763681a94a05486f63b9b66ef520343be3e17ca8cd892801aa093}}' - name: task-name value: hello-world-container - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: hello-world-container-driver template: system-container-driver - arguments: @@ -215,7 +254,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -231,8 +270,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -243,8 +282,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - 
--iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -257,6 +296,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -266,6 +318,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -275,8 +331,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -285,9 +341,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -296,6 +352,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -310,8 +374,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/container_io.yaml b/test_data/compiled-workflows/container_io.yaml index 8153c4eeded..60bfa869080 100644 --- a/test_data/compiled-workflows/container_io.yaml +++ b/test_data/compiled-workflows/container_io.yaml @@ -32,8 +32,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -60,6 +60,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -69,13 +82,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -97,6 +114,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -126,6 +151,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -136,6 +165,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs 
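# The driver and launcher containers in the hunks above all gain the same
# short-lived, audience-scoped credential: a projected service-account token
# mounted read-only at /var/run/secrets/kfp/token. A minimal standalone pod
# sketch of that projection (pod and service-account names here are
# hypothetical, not part of the compiled workflows):
apiVersion: v1
kind: Pod
metadata:
  name: token-projection-example
spec:
  serviceAccountName: pipeline-runner
  containers:
    - name: driver
      image: ghcr.io/kubeflow/kfp-driver:latest
      volumeMounts:
        - mountPath: /var/run/secrets/kfp
          name: kfp-launcher-token
          readOnly: true
  volumes:
    - name: kfp-launcher-token
      projected:
        sources:
          - serviceAccountToken:
              # The kubelet rotates the token before the 7200s expiry and
              # binds it to this audience, so it cannot be replayed against
              # other services.
              audience: pipelines.kubeflow.org
              expirationSeconds: 7200
              path: token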
name: gcs-scratch - mountPath: /s3 @@ -175,6 +207,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -199,8 +238,8 @@ spec: value: '{{workflow.parameters.implementations-685d68a4c481be1028be8fd6b26bb6eb8f637d4d7984d7a9f1a767bdafbc0275}}' - name: task-name value: container-io - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: container-io-driver template: system-container-driver - arguments: @@ -215,7 +254,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -231,8 +270,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -243,8 +282,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -257,6 +296,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -266,6 +318,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -275,8 +331,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -285,9 +341,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -296,6 +352,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -310,8 +374,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/container_no_input.yaml b/test_data/compiled-workflows/container_no_input.yaml index c6016ecf100..7e5aea76599 100644 --- a/test_data/compiled-workflows/container_no_input.yaml +++ b/test_data/compiled-workflows/container_no_input.yaml @@ -32,8 +32,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + 
- '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -60,6 +60,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -69,13 +82,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -97,6 +114,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -126,6 +151,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -136,6 +165,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -175,6 +207,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -199,8 +238,8 @@ spec: value: '{{workflow.parameters.implementations-38c5a29d4ca763681a94a05486f63b9b66ef520343be3e17ca8cd892801aa093}}' - name: task-name value: container-no-input - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: container-no-input-driver template: system-container-driver - arguments: @@ -215,7 +254,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -231,8 +270,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -243,8 +282,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -257,6 +296,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -266,6 +318,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -275,8 +331,8 @@ spec: 
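# Alongside the token, every driver and launcher container in these files now
# learns its own pod identity through the Kubernetes downward API. A reduced
# pod sketch of just that part (pod name hypothetical; env names and fieldRefs
# as in the compiled workflows):
apiVersion: v1
kind: Pod
metadata:
  name: downward-api-example
spec:
  containers:
    - name: driver
      image: ghcr.io/kubeflow/kfp-driver:latest
      command: [driver]
      env:
        - name: KFP_POD_NAME
          valueFrom:
            fieldRef:
              fieldPath: metadata.name       # resolved by the kubelet at container start
        - name: KFP_POD_UID
          valueFrom:
            fieldRef:
              fieldPath: metadata.uid        # unique to this pod instance
        - name: NAMESPACE
          valueFrom:
            fieldRef:
              fieldPath: metadata.namespace  # the namespace the workflow pod runs in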
name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -285,9 +341,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -296,6 +352,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -310,8 +374,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/container_with_artifact_output.yaml b/test_data/compiled-workflows/container_with_artifact_output.yaml index 6927cdf73a7..8408ad320a0 100644 --- a/test_data/compiled-workflows/container_with_artifact_output.yaml +++ b/test_data/compiled-workflows/container_with_artifact_output.yaml @@ -32,8 +32,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -60,6 +60,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -69,13 +82,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -97,6 +114,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -126,6 +151,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -136,6 +165,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -175,6 +207,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -199,8 +238,8 @@ spec: value: '{{workflow.parameters.implementations-f8e3101206918b913f3b1307bd1caa3d193be077324b20aa8fb05607de411069}}' - name: task-name value: container-with-artifact-output - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: 
parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: container-with-artifact-output-driver template: system-container-driver - arguments: @@ -215,7 +254,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -231,8 +270,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -243,8 +282,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -257,6 +296,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -266,6 +318,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -275,8 +331,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -285,9 +341,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -296,6 +352,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -310,8 +374,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/container_with_concat_placeholder.yaml b/test_data/compiled-workflows/container_with_concat_placeholder.yaml index def6e2f6868..c99fa851dad 100644 --- a/test_data/compiled-workflows/container_with_concat_placeholder.yaml +++ b/test_data/compiled-workflows/container_with_concat_placeholder.yaml @@ -33,8 +33,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -61,6 +61,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -70,13 +83,17 @@ spec: requests: cpu: 
100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -98,6 +115,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -127,6 +152,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -137,6 +166,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -176,6 +208,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -200,8 +239,8 @@ spec: value: '{{workflow.parameters.implementations-0f31c7277c0aefdce4b0699271966a6e11bb78bb3df17d5dc950a54509d02679}}' - name: task-name value: container-with-concat-placeholder - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: container-with-concat-placeholder-driver template: system-container-driver - arguments: @@ -216,7 +255,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -232,8 +271,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -244,8 +283,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -258,6 +297,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -267,6 +319,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -276,8 +332,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -286,9 +342,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -297,6 +353,14 @@ spec: 
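# Net effect of the renames running through these files: the system-dag-driver's
# execution-id output (read from /tmp/outputs/execution-id) is replaced by a
# parent task ID written to /tmp/outputs/task-id, its input default changes
# from "0" to an empty string so the root driver starts with no parent, and
# child templates consume the value as parent-dag-task-id. A producer/consumer
# sketch trimmed to the relevant fields (comp-root is a hypothetical child
# template name):
- name: system-dag-driver
  outputs:
    parameters:
      - name: parent-dag-task-id-path
        valueFrom:
          path: /tmp/outputs/task-id
- name: root
  dag:
    tasks:
      - name: root-driver
        template: system-dag-driver
      - name: root
        template: comp-root
        depends: root-driver.Succeeded
        arguments:
          parameters:
            - name: parent-dag-task-id
              value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}'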
valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -311,8 +375,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/container_with_if_placeholder.yaml b/test_data/compiled-workflows/container_with_if_placeholder.yaml index 07b6bfcea51..77d4b81782e 100644 --- a/test_data/compiled-workflows/container_with_if_placeholder.yaml +++ b/test_data/compiled-workflows/container_with_if_placeholder.yaml @@ -35,8 +35,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -63,6 +63,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -72,13 +85,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -100,6 +117,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -129,6 +154,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -139,6 +168,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -178,6 +210,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -202,8 +241,8 @@ spec: value: '{{workflow.parameters.implementations-d97b94290cc248e70e263031ba3a32938896cb7df7937b14caf50d8967631256}}' - name: task-name value: container-with-if-placeholder - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: container-with-if-placeholder-driver template: system-container-driver - arguments: @@ -218,7 +257,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -234,8 +273,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - 
'{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -246,8 +285,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -260,6 +299,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -269,6 +321,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -278,8 +334,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -288,9 +344,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -299,6 +355,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -313,8 +377,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/container_with_placeholder_in_fstring.yaml b/test_data/compiled-workflows/container_with_placeholder_in_fstring.yaml index 2742a7d49b7..60522ea088b 100644 --- a/test_data/compiled-workflows/container_with_placeholder_in_fstring.yaml +++ b/test_data/compiled-workflows/container_with_placeholder_in_fstring.yaml @@ -32,8 +32,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -60,6 +60,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -69,13 +82,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -97,6 +114,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: 
kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -126,6 +151,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -136,6 +165,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -175,6 +207,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -199,8 +238,8 @@ spec: value: '{{workflow.parameters.implementations-79db8839d07686ed239d9057400d121034026b267540e3591bf32d0e1237cb52}}' - name: task-name value: container-with-placeholder-in-fstring - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: container-with-placeholder-in-fstring-driver template: system-container-driver - arguments: @@ -215,7 +254,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -231,8 +270,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -243,8 +282,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -257,6 +296,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -266,6 +318,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -275,8 +331,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -285,9 +341,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -296,6 +352,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -310,8 +374,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: 
'{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/containerized_python_component.yaml b/test_data/compiled-workflows/containerized_python_component.yaml index eacab4d8c67..bed3bc8bc8f 100644 --- a/test_data/compiled-workflows/containerized_python_component.yaml +++ b/test_data/compiled-workflows/containerized_python_component.yaml @@ -32,8 +32,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -60,6 +60,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -69,13 +82,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -97,6 +114,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -126,6 +151,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -136,6 +165,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -175,6 +207,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -199,8 +238,8 @@ spec: value: '{{workflow.parameters.implementations-f5a84f596803cacaf4013e9a5ef593b3f9fbe93b7f0f58618112e8ab9541f693}}' - name: task-name value: concat-message - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: concat-message-driver template: system-container-driver - arguments: @@ -215,7 +254,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -231,8 +270,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -243,8 +282,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - 
'{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -257,6 +296,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -266,6 +318,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -275,8 +331,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -285,9 +341,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -296,6 +352,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -310,8 +374,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/create_pod_metadata_complex.yaml b/test_data/compiled-workflows/create_pod_metadata_complex.yaml index 5d4ab18da5c..64d7b922a09 100644 --- a/test_data/compiled-workflows/create_pod_metadata_complex.yaml +++ b/test_data/compiled-workflows/create_pod_metadata_complex.yaml @@ -85,8 +85,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -113,6 +113,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -122,13 +135,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -150,6 +167,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -179,6 +204,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -189,6 +218,9 @@ spec: volumeMounts: - mountPath: 
/kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -228,6 +260,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -287,6 +326,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -297,6 +340,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -347,6 +393,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -400,6 +453,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -410,6 +467,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -456,6 +516,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -480,8 +547,8 @@ spec: value: '{{workflow.parameters.implementations-ddf5c44b745eff77bd01b2badb8eb0ad37a114f732779104288f59e67701512f}}' - name: task-name value: validate-no-pod-metadata - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: validate-no-pod-metadata-driver template: system-container-driver - arguments: @@ -504,8 +571,8 @@ spec: value: '{{workflow.parameters.implementations-c18d45fda14ac35e1b8c0d82357e972f4d416ae8d9ce79aef5cd5f31cb0e7735}}' - name: task-name value: validate-pod-metadata - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: kubernetes-config value: '{{workflow.parameters.kubernetes-comp-validate-pod-metadata}}' name: validate-pod-metadata-driver @@ -542,8 +609,8 @@ spec: value: '{{workflow.parameters.implementations-c18d45fda14ac35e1b8c0d82357e972f4d416ae8d9ce79aef5cd5f31cb0e7735}}' - name: task-name value: validate-pod-metadata-2 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: kubernetes-config value: '{{workflow.parameters.kubernetes-comp-validate-pod-metadata-2}}' name: validate-pod-metadata-2-driver @@ -568,7 +635,7 @@ spec: template: metadata-2-0-system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -584,8 +651,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - 
'{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -596,8 +663,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -610,6 +677,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -619,6 +699,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -628,8 +712,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -638,9 +722,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -649,6 +733,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -663,8 +755,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/cross_loop_after_topology.yaml b/test_data/compiled-workflows/cross_loop_after_topology.yaml index 4f3b1699329..5a86365c863 100644 --- a/test_data/compiled-workflows/cross_loop_after_topology.yaml +++ b/test_data/compiled-workflows/cross_loop_after_topology.yaml @@ -62,8 +62,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -90,6 +90,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -99,13 +112,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -127,6 +144,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + 
expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -156,6 +181,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -166,6 +195,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -205,6 +237,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -229,8 +268,10 @@ spec: value: '{{workflow.parameters.implementations-2a367e5abfe5ee4a63da037b9312bc5427fac22774eda49990b500ddc4de005b}}' - name: task-name value: print-op-5 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: print-op-5-driver template: system-container-driver - arguments: @@ -240,12 +281,15 @@ spec: - default: "false" name: cached-decision value: '{{tasks.print-op-5-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: print-op-5-driver.Succeeded name: print-op-5 template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id + - name: iteration-index metadata: {} name: comp-for-loop-10 outputs: {} @@ -261,8 +305,10 @@ spec: value: '{{workflow.parameters.implementations-2a367e5abfe5ee4a63da037b9312bc5427fac22774eda49990b500ddc4de005b}}' - name: task-name value: print-op-7 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: print-op-7-driver template: system-container-driver - arguments: @@ -272,12 +318,15 @@ spec: - default: "false" name: cached-decision value: '{{tasks.print-op-7-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: print-op-7-driver.Succeeded name: print-op-7 template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id + - name: iteration-index metadata: {} name: comp-for-loop-14 outputs: {} @@ -293,8 +342,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -305,8 +354,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -319,6 +368,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: 
ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -328,6 +390,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -337,8 +403,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -347,9 +413,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -358,64 +424,43 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-14}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-14"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-13","items":{"raw":"[1, - 2]"}},"taskInfo":{"name":"for-loop-14"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-14 - inputs: - parameters: - - name: parent-dag-id - - name: iteration-index - metadata: {} - name: comp-for-loop-14-iteration - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-14}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-for-loop-14"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-13","items":{"raw":"[1, 2]"}},"taskInfo":{"name":"for-loop-14"}}' + - name: task-name + value: for-loop-14 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-14-iteration + template: comp-for-loop-14 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-14-for-loop-14-iterator outputs: {} @@ -423,8 +468,10 @@ spec: tasks: - arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: for-loop-14 template: comp-for-loop-14-for-loop-14-iterator - arguments: @@ -437,8 +484,10 @@ spec: value: 
'{{workflow.parameters.implementations-2a367e5abfe5ee4a63da037b9312bc5427fac22774eda49990b500ddc4de005b}}' - name: task-name value: print-op-8 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: for-loop-14.Succeeded name: print-op-8-driver template: system-container-driver @@ -449,12 +498,15 @@ spec: - default: "false" name: cached-decision value: '{{tasks.print-op-8-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: print-op-8-driver.Succeeded name: print-op-8 template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id + - name: iteration-index metadata: {} name: comp-for-loop-12 outputs: {} @@ -470,8 +522,10 @@ spec: value: '{{workflow.parameters.implementations-2a367e5abfe5ee4a63da037b9312bc5427fac22774eda49990b500ddc4de005b}}' - name: task-name value: print-op - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: print-op-driver template: system-container-driver - arguments: @@ -481,12 +535,15 @@ spec: - default: "false" name: cached-decision value: '{{tasks.print-op-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: print-op-driver.Succeeded name: print-op template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id + - name: iteration-index metadata: {} name: comp-for-loop-2 outputs: {} @@ -502,8 +559,10 @@ spec: value: '{{workflow.parameters.implementations-2a367e5abfe5ee4a63da037b9312bc5427fac22774eda49990b500ddc4de005b}}' - name: task-name value: print-op-2 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: print-op-2-driver template: system-container-driver - arguments: @@ -513,12 +572,15 @@ spec: - default: "false" name: cached-decision value: '{{tasks.print-op-2-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: print-op-2-driver.Succeeded name: print-op-2 template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id + - name: iteration-index metadata: {} name: comp-for-loop-4 outputs: {} @@ -534,8 +596,10 @@ spec: value: '{{workflow.parameters.implementations-2a367e5abfe5ee4a63da037b9312bc5427fac22774eda49990b500ddc4de005b}}' - name: task-name value: print-op-4 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: print-op-4-driver template: system-container-driver - arguments: @@ -545,45 +609,17 @@ spec: - default: "false" name: cached-decision value: '{{tasks.print-op-4-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: print-op-4-driver.Succeeded name: print-op-4 template: system-container-executor inputs: 
parameters: - - name: parent-dag-id - metadata: {} - name: comp-for-loop-8 - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-8}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-8"},"dependentTasks":["print-op-3"],"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-7","items":{"raw":"[1, - 2]"}},"taskInfo":{"name":"for-loop-8"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-8 - inputs: - parameters: - - name: parent-dag-id + - name: parent-dag-task-id - name: iteration-index metadata: {} - name: comp-for-loop-8-iteration + name: comp-for-loop-8 outputs: {} - dag: tasks: @@ -591,27 +627,29 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-8}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-for-loop-8"},"dependentTasks":["print-op-3"],"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-7","items":{"raw":"[1, 2]"}},"taskInfo":{"name":"for-loop-8"}}' + - name: task-name + value: for-loop-8 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-8-iteration + template: comp-for-loop-8 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-8-for-loop-8-iterator outputs: {} @@ -619,8 +657,10 @@ spec: tasks: - arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: print-op-3.Succeeded name: for-loop-8 template: comp-for-loop-8-for-loop-8-iterator @@ -634,8 +674,10 @@ spec: value: '{{workflow.parameters.implementations-2a367e5abfe5ee4a63da037b9312bc5427fac22774eda49990b500ddc4de005b}}' - name: task-name value: print-op-3 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: print-op-3-driver template: system-container-driver - arguments: @@ -645,45 +687,17 @@ spec: - default: "false" name: cached-decision value: '{{tasks.print-op-3-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: print-op-3-driver.Succeeded name: print-op-3 template: system-container-executor inputs: 
parameters: - - name: parent-dag-id - metadata: {} - name: comp-for-loop-6 - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-10}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-10"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-9","items":{"raw":"[1, - 2]"}},"taskInfo":{"name":"for-loop-10"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-10 - inputs: - parameters: - - name: parent-dag-id + - name: parent-dag-task-id - name: iteration-index metadata: {} - name: comp-for-loop-10-iteration + name: comp-for-loop-6 outputs: {} - dag: tasks: @@ -691,27 +705,29 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-10}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-for-loop-10"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-9","items":{"raw":"[1, 2]"}},"taskInfo":{"name":"for-loop-10"}}' + - name: task-name + value: for-loop-10 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-10-iteration + template: comp-for-loop-10 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-10-for-loop-10-iterator outputs: {} @@ -721,58 +737,29 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-12}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-12"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-11","items":{"raw":"[1, - 2]"}},"taskInfo":{"name":"for-loop-12"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-12 - inputs: - parameters: - - name: parent-dag-id - - name: iteration-index - metadata: {} - name: comp-for-loop-12-iteration - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-12}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: 
'{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-for-loop-12"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-11","items":{"raw":"[1, 2]"}},"taskInfo":{"name":"for-loop-12"}}' + - name: task-name + value: for-loop-12 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-12-iteration + template: comp-for-loop-12 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-12-for-loop-12-iterator outputs: {} @@ -782,58 +769,29 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-2}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, - 2]"}},"taskInfo":{"name":"for-loop-2"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-2 - inputs: - parameters: - - name: parent-dag-id - - name: iteration-index - metadata: {} - name: comp-for-loop-2-iteration - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-2}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, 2]"}},"taskInfo":{"name":"for-loop-2"}}' + - name: task-name + value: for-loop-2 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-2-iteration + template: comp-for-loop-2 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-2-for-loop-2-iterator outputs: {} @@ -843,58 +801,29 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-4}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: 
'{"componentRef":{"name":"comp-for-loop-4"},"dependentTasks":["for-loop-2"],"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[1, - 2]"}},"taskInfo":{"name":"for-loop-4"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-4 - inputs: - parameters: - - name: parent-dag-id - - name: iteration-index - metadata: {} - name: comp-for-loop-4-iteration - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-4}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-for-loop-4"},"dependentTasks":["for-loop-2"],"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[1, 2]"}},"taskInfo":{"name":"for-loop-4"}}' + - name: task-name + value: for-loop-4 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-4-iteration + template: comp-for-loop-4 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-4-for-loop-4-iterator outputs: {} @@ -904,58 +833,29 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-6}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-6"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-5","items":{"raw":"[1, - 2]"}},"taskInfo":{"name":"for-loop-6"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-6 - inputs: - parameters: - - name: parent-dag-id - - name: iteration-index - metadata: {} - name: comp-for-loop-6-iteration - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-6}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-for-loop-6"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-5","items":{"raw":"[1, 2]"}},"taskInfo":{"name":"for-loop-6"}}' + - name: task-name + value: for-loop-6 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: 
'{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-6-iteration + template: comp-for-loop-6 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-6-for-loop-6-iterator outputs: {} @@ -963,33 +863,33 @@ spec: tasks: - arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: for-loop-10 template: comp-for-loop-10-for-loop-10-iterator - arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: for-loop-12 template: comp-for-loop-12-for-loop-12-iterator - arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: for-loop-2 template: comp-for-loop-2-for-loop-2-iterator - arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: for-loop-2.Succeeded name: for-loop-4 template: comp-for-loop-4-for-loop-4-iterator - arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: for-loop-6 template: comp-for-loop-6-for-loop-6-iterator - arguments: @@ -1002,8 +902,8 @@ spec: value: '{{workflow.parameters.implementations-2a367e5abfe5ee4a63da037b9312bc5427fac22774eda49990b500ddc4de005b}}' - name: task-name value: print-op-6 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: for-loop-10.Succeeded name: print-op-6-driver template: system-container-driver @@ -1019,7 +919,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -1037,8 +937,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/dict_input.yaml b/test_data/compiled-workflows/dict_input.yaml index 30dc39ed68d..c8bc982a905 100644 --- a/test_data/compiled-workflows/dict_input.yaml +++ b/test_data/compiled-workflows/dict_input.yaml @@ -41,8 +41,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -69,6 +69,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: 
metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -78,13 +91,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -106,6 +123,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -135,6 +160,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -145,6 +174,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -184,6 +216,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -208,8 +247,8 @@ spec: value: '{{workflow.parameters.implementations-35671413d3dd82981b2a90982070716c0f9c16927eb0c60333b097b2d5a6774b}}' - name: task-name value: dict-input - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: dict-input-driver template: system-container-driver - arguments: @@ -224,7 +263,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -240,8 +279,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -252,8 +291,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -266,6 +305,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -275,6 +327,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -284,8 +340,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -294,9 +350,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - 
name: iteration-count valueFrom: default: "0" @@ -305,6 +361,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -319,8 +383,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/embedded_artifact.yaml b/test_data/compiled-workflows/embedded_artifact.yaml index 3cc641ca925..ec3ec732faf 100644 --- a/test_data/compiled-workflows/embedded_artifact.yaml +++ b/test_data/compiled-workflows/embedded_artifact.yaml @@ -82,8 +82,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -110,6 +110,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -119,13 +132,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -147,6 +164,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -176,6 +201,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -186,6 +215,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -225,6 +257,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -249,8 +288,8 @@ spec: value: '{{workflow.parameters.implementations-ffbce4c56102eddc51d0cb2aaa352ac140f1bbf010042621313e0144a79c7a20}}' - name: task-name value: read-embedded-artifact-dir - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: read-embedded-artifact-dir-driver template: system-container-driver - arguments: @@ -273,8 +312,8 @@ spec: value: '{{workflow.parameters.implementations-51efdf1bda01221dc8fa240e519ad3100e7bd478949411c505a8925e19f655a1}}' - name: task-name value: read-embedded-artifact-file - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: 
parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: read-embedded-artifact-file-driver template: system-container-driver - arguments: @@ -289,7 +328,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -305,8 +344,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -317,8 +356,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -331,6 +370,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -340,6 +392,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -349,8 +405,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -359,9 +415,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -370,6 +426,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -384,8 +448,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/env-var.yaml b/test_data/compiled-workflows/env-var.yaml index 1f48c6c6d98..c3760aa0c16 100644 --- a/test_data/compiled-workflows/env-var.yaml +++ b/test_data/compiled-workflows/env-var.yaml @@ -43,8 +43,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -71,6 +71,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -80,13 +93,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + 
readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -108,6 +125,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -137,6 +162,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -147,6 +176,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -186,6 +218,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -210,8 +249,8 @@ spec: value: '{{workflow.parameters.implementations-5993ccc78137b54f60188169af3d4eed06dd024defc1904818d928e904ae14e6}}' - name: task-name value: comp - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: comp-driver template: system-container-driver - arguments: @@ -226,7 +265,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -242,8 +281,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -254,8 +293,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -268,6 +307,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -277,6 +329,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -286,8 +342,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -296,9 +352,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -307,6 +363,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: 
pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -321,8 +385,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/fail_v2.yaml b/test_data/compiled-workflows/fail_v2.yaml index b025c45c82f..50a8e214d00 100644 --- a/test_data/compiled-workflows/fail_v2.yaml +++ b/test_data/compiled-workflows/fail_v2.yaml @@ -41,8 +41,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -69,6 +69,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -78,13 +91,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -106,6 +123,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -135,6 +160,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -145,6 +174,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -184,6 +216,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -208,8 +247,8 @@ spec: value: '{{workflow.parameters.implementations-29331b5087411692b16ea3f55f802cd64bc8edfcbcf0db7f6592cc1b91685487}}' - name: task-name value: fail - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: fail-driver template: system-container-driver - arguments: @@ -224,7 +263,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -240,8 +279,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -252,8 +291,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - 
- '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -266,6 +305,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -275,6 +327,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -284,8 +340,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -294,9 +350,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -305,6 +361,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -319,8 +383,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/flip_coin.yaml b/test_data/compiled-workflows/flip_coin.yaml index 3267c16b16b..b4af9e518b3 100644 --- a/test_data/compiled-workflows/flip_coin.yaml +++ b/test_data/compiled-workflows/flip_coin.yaml @@ -98,8 +98,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -126,6 +126,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -135,13 +148,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -163,6 +180,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -192,6 +217,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -202,6 +231,9 @@ spec: volumeMounts: - mountPath: 
/kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -241,6 +273,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -267,8 +306,8 @@ spec: value: '{{workflow.parameters.implementations-6d9fd4a4d18480bfc98d1bbb174a6aaea617e354b4ed0efcc0eadc2414b1e582}}' - name: task-name value: print-msg - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-msg-driver template: system-container-driver - arguments: @@ -283,7 +322,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-2 outputs: {} @@ -301,8 +340,8 @@ spec: value: '{{workflow.parameters.implementations-6d9fd4a4d18480bfc98d1bbb174a6aaea617e354b4ed0efcc0eadc2414b1e582}}' - name: task-name value: print-msg-2 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-msg-2-driver template: system-container-driver - arguments: @@ -317,7 +356,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-3 outputs: {} @@ -333,8 +372,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -345,8 +384,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -359,6 +398,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -368,6 +420,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -377,8 +433,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -387,9 +443,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -398,14 +454,22 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: parameters: - name: component value: 
'{{workflow.parameters.components-comp-condition-2}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-2"},"dependentTasks":["random-num"],"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"componentInputParameter":"pipelinechannel--flip-coin-Output"},"pipelinechannel--random-num-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"random-num"}}}},"taskInfo":{"name":"condition-2"},"triggerPolicy":{"condition":"int(inputs.parameter_values[''pipelinechannel--random-num-Output'']) \u003e 5"}}' @@ -416,8 +480,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-2-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-2-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-2-driver.outputs.parameters.condition}}' depends: condition-2-driver.Succeeded @@ -428,8 +492,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-3}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-3"},"dependentTasks":["random-num"],"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"componentInputParameter":"pipelinechannel--flip-coin-Output"},"pipelinechannel--random-num-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"random-num"}}}},"taskInfo":{"name":"condition-3"},"triggerPolicy":{"condition":"int(inputs.parameter_values[''pipelinechannel--random-num-Output'']) \u003c= 5"}}' @@ -440,8 +504,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-3-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-3-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-3-driver.outputs.parameters.condition}}' depends: condition-3-driver.Succeeded @@ -458,8 +522,8 @@ spec: value: '{{workflow.parameters.implementations-2c52a38e80b2a374e3ca25334ecf08a91b2565a0eb9fe88129b5d3b4392493b0}}' - name: task-name value: random-num - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: random-num-driver template: system-container-driver - arguments: @@ -474,7 +538,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-1 outputs: {} @@ -492,8 +556,8 @@ spec: value: '{{workflow.parameters.implementations-6d9fd4a4d18480bfc98d1bbb174a6aaea617e354b4ed0efcc0eadc2414b1e582}}' - name: task-name value: print-msg-3 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-msg-3-driver template: system-container-driver - arguments: @@ -508,7 +572,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-5 outputs: {} @@ -526,8 +590,8 @@ spec: value: 
'{{workflow.parameters.implementations-6d9fd4a4d18480bfc98d1bbb174a6aaea617e354b4ed0efcc0eadc2414b1e582}}' - name: task-name value: print-msg-4 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-msg-4-driver template: system-container-driver - arguments: @@ -542,7 +606,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-6 outputs: {} @@ -552,8 +616,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-5}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-5"},"dependentTasks":["random-num-2"],"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"componentInputParameter":"pipelinechannel--flip-coin-Output"},"pipelinechannel--random-num-2-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"random-num-2"}}}},"taskInfo":{"name":"condition-5"},"triggerPolicy":{"condition":"int(inputs.parameter_values[''pipelinechannel--random-num-2-Output'']) \u003e 15"}}' @@ -564,8 +628,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-5-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-5-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-5-driver.outputs.parameters.condition}}' depends: condition-5-driver.Succeeded @@ -576,8 +640,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-6}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-6"},"dependentTasks":["random-num-2"],"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"componentInputParameter":"pipelinechannel--flip-coin-Output"},"pipelinechannel--random-num-2-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"random-num-2"}}}},"taskInfo":{"name":"condition-6"},"triggerPolicy":{"condition":"int(inputs.parameter_values[''pipelinechannel--random-num-2-Output'']) \u003c= 15"}}' @@ -588,8 +652,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-6-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-6-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-6-driver.outputs.parameters.condition}}' depends: condition-6-driver.Succeeded @@ -606,8 +670,8 @@ spec: value: '{{workflow.parameters.implementations-2c52a38e80b2a374e3ca25334ecf08a91b2565a0eb9fe88129b5d3b4392493b0}}' - name: task-name value: random-num-2 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: random-num-2-driver template: system-container-driver - arguments: @@ -622,7 +686,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-4 outputs: {} @@ -632,8 +696,8 @@ spec: parameters: - name: component 
value: '{{workflow.parameters.components-comp-condition-1}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-1"},"dependentTasks":["flip-coin"],"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin"}}}},"taskInfo":{"name":"condition-1"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-coin-Output''] == ''heads''"}}' @@ -644,8 +708,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-1-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-1-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-1-driver.outputs.parameters.condition}}' depends: condition-1-driver.Succeeded @@ -656,8 +720,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-4}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-4"},"dependentTasks":["flip-coin"],"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin"}}}},"taskInfo":{"name":"condition-4"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-coin-Output''] == ''tails''"}}' @@ -668,8 +732,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-4-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-4-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-4-driver.outputs.parameters.condition}}' depends: condition-4-driver.Succeeded @@ -686,8 +750,8 @@ spec: value: '{{workflow.parameters.implementations-6175eb183c8ba935a4c42b40e65a5c3e4623a9c16b38fd5c4bbfc23275f0e9bd}}' - name: task-name value: flip-coin - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: flip-coin-driver template: system-container-driver - arguments: @@ -702,7 +766,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -720,8 +784,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/hello-world.yaml b/test_data/compiled-workflows/hello-world.yaml index c3580875556..58cac43a767 100644 --- a/test_data/compiled-workflows/hello-world.yaml +++ b/test_data/compiled-workflows/hello-world.yaml @@ -32,8 +32,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -60,6 +60,19 @@ spec: - "" command: - driver + env: + - name: 
KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -69,13 +82,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -97,6 +114,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -126,6 +151,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -136,6 +165,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -175,6 +207,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -199,8 +238,8 @@ spec: value: '{{workflow.parameters.implementations-cf9c81ac9e6ab0dcdd92cb89ed717317e681cb0645cb5ddfc4824b1de14346b3}}' - name: task-name value: echo - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: echo-driver template: system-container-driver - arguments: @@ -215,7 +254,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -231,8 +270,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -243,8 +282,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -257,6 +296,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -266,6 +318,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -275,8 +331,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -285,9 +341,9 @@ 
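# The hunks above wire the same identity plumbing into every driver and
# launcher pod: the downward API exposes the pod's own name, UID, and
# namespace as env vars, and a projected, audience-scoped service account
# token is mounted read-only at /var/run/secrets/kfp/token so the container
# can authenticate itself to the pipeline backend. A minimal standalone
# sketch of that pattern, assembled from these hunks (the pod name is
# illustrative, not part of this diff):
apiVersion: v1
kind: Pod
metadata:
  name: driver-example            # hypothetical name, for illustration only
spec:
  containers:
    - name: driver
      image: ghcr.io/kubeflow/kfp-driver:latest
      env:
        - name: KFP_POD_NAME
          valueFrom:
            fieldRef:
              fieldPath: metadata.name
        - name: KFP_POD_UID
          valueFrom:
            fieldRef:
              fieldPath: metadata.uid
        - name: NAMESPACE
          valueFrom:
            fieldRef:
              fieldPath: metadata.namespace
      volumeMounts:
        - mountPath: /var/run/secrets/kfp
          name: kfp-launcher-token
          readOnly: true
  volumes:
    - name: kfp-launcher-token
      projected:
        sources:
          - serviceAccountToken:
              audience: pipelines.kubeflow.org  # token is rejected for other audiences
              expirationSeconds: 7200           # kubelet re-projects it before expiry
              path: token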
spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -296,6 +352,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -310,8 +374,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/identity.yaml b/test_data/compiled-workflows/identity.yaml index 4901bdfd87c..f7ca4dbdb62 100644 --- a/test_data/compiled-workflows/identity.yaml +++ b/test_data/compiled-workflows/identity.yaml @@ -41,8 +41,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -69,6 +69,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -78,13 +91,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -106,6 +123,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -135,6 +160,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -145,6 +174,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -184,6 +216,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -208,8 +247,8 @@ spec: value: '{{workflow.parameters.implementations-4f72b1b68ec8567b06989de2c1a51aa5a00125003bb0b1c7dd1bb2eb07970287}}' - name: task-name value: identity - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: identity-driver template: system-container-driver - arguments: @@ -224,7 +263,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root 
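# In Argo terms, the system-dag-driver template now surfaces the ID the
# driver binary writes to /tmp/outputs/task-id as the output parameter
# parent-dag-task-id-path, and each child DAG consumes it as its
# parent-dag-task-id input. A toy workflow showing just that handoff,
# under the assumption that the driver writes the file as these hunks
# indicate (all names other than the parameter wiring are illustrative):
apiVersion: argoproj.io/v1alpha1
kind: Workflow
metadata:
  generateName: dag-id-handoff-   # hypothetical
spec:
  entrypoint: wrapper
  templates:
    - name: dag-driver            # stands in for system-dag-driver
      container:
        image: ghcr.io/kubeflow/kfp-driver:latest
        command: [driver]
      outputs:
        parameters:
          - name: parent-dag-task-id-path
            valueFrom:
              path: /tmp/outputs/task-id   # file written by the driver binary
    - name: child                 # stands in for any comp-* template
      inputs:
        parameters:
          - name: parent-dag-task-id
      container:
        image: alpine:3
        command: [echo, '{{inputs.parameters.parent-dag-task-id}}']
    - name: wrapper
      dag:
        tasks:
          - name: driver
            template: dag-driver
          - name: body
            template: child
            depends: driver.Succeeded
            arguments:
              parameters:
                - name: parent-dag-task-id
                  value: '{{tasks.driver.outputs.parameters.parent-dag-task-id-path}}'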
outputs: {} @@ -240,8 +279,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -252,8 +291,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -266,6 +305,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -275,6 +327,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -284,8 +340,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -294,9 +350,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -305,6 +361,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -319,8 +383,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/if_elif_else_complex.yaml b/test_data/compiled-workflows/if_elif_else_complex.yaml index 7f09346c05b..20f210f87c2 100644 --- a/test_data/compiled-workflows/if_elif_else_complex.yaml +++ b/test_data/compiled-workflows/if_elif_else_complex.yaml @@ -132,8 +132,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -160,6 +160,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -169,13 +182,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -197,6 +214,14 @@ spec: valueFrom: default: "true" path: 
/tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -226,6 +251,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -236,6 +265,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -275,6 +307,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -300,8 +339,8 @@ spec: value: '{{workflow.parameters.implementations-cecb98cac1de5c44c981fe9bc4ca2eaa0b650231b182536eb374f37a191a09d2}}' - name: task-name value: print-and-return - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-and-return-driver template: system-container-driver - arguments: @@ -316,7 +355,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-3 outputs: {} @@ -332,8 +371,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -344,8 +383,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -358,6 +397,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -367,6 +419,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -376,8 +432,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -386,9 +442,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -397,14 +453,22 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-3}}' - - name: parent-dag-id - value: 
'{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-3"},"inputs":{"parameters":{"pipelinechannel--add_drumroll":{"componentInputParameter":"pipelinechannel--add_drumroll"},"pipelinechannel--trials-loop-item":{"componentInputParameter":"pipelinechannel--trials-loop-item"}}},"taskInfo":{"name":"condition-3"},"triggerPolicy":{"condition":"int(inputs.parameter_values[''pipelinechannel--trials-loop-item'']) == 3"}}' @@ -414,8 +478,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-3-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-3-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-3-driver.outputs.parameters.condition}}' depends: condition-3-driver.Succeeded @@ -424,7 +488,7 @@ spec: when: '{{tasks.condition-3-driver.outputs.parameters.condition}} != false' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-2 outputs: {} @@ -441,8 +505,10 @@ spec: value: '{{workflow.parameters.implementations-cecb98cac1de5c44c981fe9bc4ca2eaa0b650231b182536eb374f37a191a09d2}}' - name: task-name value: print-and-return-9 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: print-and-return-9-driver template: system-container-driver - arguments: @@ -452,45 +518,17 @@ spec: - default: "false" name: cached-decision value: '{{tasks.print-and-return-9-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: print-and-return-9-driver.Succeeded name: print-and-return-9 template: system-container-executor inputs: parameters: - - name: parent-dag-id - metadata: {} - name: comp-for-loop-16 - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-16}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-16"},"inputs":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"componentInputParameter":"pipelinechannel--int-0-to-9999-Output"},"pipelinechannel--repeat_if_lucky_number":{"componentInputParameter":"pipelinechannel--repeat_if_lucky_number"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-15","items":{"raw":"[1, - 2]"}},"taskInfo":{"name":"for-loop-16"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-16 - inputs: - parameters: - - name: parent-dag-id + - name: parent-dag-task-id - name: iteration-index metadata: {} - name: comp-for-loop-16-iteration + name: comp-for-loop-16 outputs: {} - dag: tasks: @@ -498,27 +536,29 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-16}}' - - name: parent-dag-id - value: 
'{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-for-loop-16"},"inputs":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"componentInputParameter":"pipelinechannel--int-0-to-9999-Output"},"pipelinechannel--repeat_if_lucky_number":{"componentInputParameter":"pipelinechannel--repeat_if_lucky_number"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-15","items":{"raw":"[1, 2]"}},"taskInfo":{"name":"for-loop-16"}}' + - name: task-name + value: for-loop-16 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-16-iteration + template: comp-for-loop-16 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-16-for-loop-16-iterator outputs: {} @@ -526,13 +566,13 @@ spec: tasks: - arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: for-loop-16 template: comp-for-loop-16-for-loop-16-iterator inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-14 outputs: {} @@ -542,8 +582,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-14}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-14"},"inputs":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"componentInputParameter":"pipelinechannel--int-0-to-9999-Output"},"pipelinechannel--repeat_if_lucky_number":{"componentInputParameter":"pipelinechannel--repeat_if_lucky_number"}}},"taskInfo":{"name":"condition-14"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--repeat_if_lucky_number''] == true"}}' @@ -553,8 +593,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-14-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-14-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-14-driver.outputs.parameters.condition}}' depends: condition-14-driver.Succeeded @@ -572,8 +612,8 @@ spec: value: '{{workflow.parameters.implementations-cecb98cac1de5c44c981fe9bc4ca2eaa0b650231b182536eb374f37a191a09d2}}' - name: task-name value: print-and-return-8 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-and-return-8-driver template: system-container-driver - arguments: @@ -588,7 +628,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-13 outputs: {} @@ -605,8 +645,8 @@ spec: value: 
'{{workflow.parameters.implementations-cecb98cac1de5c44c981fe9bc4ca2eaa0b650231b182536eb374f37a191a09d2}}' - name: task-name value: print-and-return-2 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-and-return-2-driver template: system-container-driver - arguments: @@ -621,7 +661,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-6 outputs: {} @@ -638,8 +678,8 @@ spec: value: '{{workflow.parameters.implementations-cecb98cac1de5c44c981fe9bc4ca2eaa0b650231b182536eb374f37a191a09d2}}' - name: task-name value: print-and-return-3 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-and-return-3-driver template: system-container-driver - arguments: @@ -654,7 +694,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-7 outputs: {} @@ -664,8 +704,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-6}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-6"},"inputs":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"componentInputParameter":"pipelinechannel--int-0-to-9999-Output"},"pipelinechannel--is-even-or-odd-Output":{"componentInputParameter":"pipelinechannel--is-even-or-odd-Output"}}},"taskInfo":{"name":"condition-6"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--is-even-or-odd-Output''] == ''even''"}}' @@ -675,8 +715,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-6-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-6-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-6-driver.outputs.parameters.condition}}' depends: condition-6-driver.Succeeded @@ -687,8 +727,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-7}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-7"},"inputs":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"componentInputParameter":"pipelinechannel--int-0-to-9999-Output"},"pipelinechannel--is-even-or-odd-Output":{"componentInputParameter":"pipelinechannel--is-even-or-odd-Output"}}},"taskInfo":{"name":"condition-7"},"triggerPolicy":{"condition":"!(inputs.parameter_values[''pipelinechannel--is-even-or-odd-Output''] == ''even'')"}}' @@ -698,8 +738,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-7-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-7-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-7-driver.outputs.parameters.condition}}' depends: condition-7-driver.Succeeded @@ -708,7 +748,7 @@ spec: when: '{{tasks.condition-7-driver.outputs.parameters.condition}} != false' inputs: 
parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-branches-5 outputs: {} @@ -718,8 +758,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-branches-5}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-branches-5"},"dependentTasks":["is-even-or-odd"],"inputs":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"componentInputParameter":"pipelinechannel--int-0-to-9999-Output"},"pipelinechannel--is-even-or-odd-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"is-even-or-odd"}}}},"taskInfo":{"name":"condition-branches-5"}}' - name: task-name @@ -729,8 +769,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-branches-5-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-branches-5-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-branches-5-driver.outputs.parameters.condition}}' depends: condition-branches-5-driver.Succeeded @@ -746,8 +786,8 @@ spec: value: '{{workflow.parameters.implementations-9f03385cde2d0dd36c4de829527d6ece0aa8acc96cfc09cd66b2f670fb6c12eb}}' - name: task-name value: is-even-or-odd - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: is-even-or-odd-driver template: system-container-driver - arguments: @@ -770,8 +810,8 @@ spec: value: '{{workflow.parameters.implementations-cecb98cac1de5c44c981fe9bc4ca2eaa0b650231b182536eb374f37a191a09d2}}' - name: task-name value: print-and-return-4 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: condition-branches-5.Succeeded name: print-and-return-4-driver template: system-container-driver @@ -787,7 +827,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-8 outputs: {} @@ -804,8 +844,8 @@ spec: value: '{{workflow.parameters.implementations-cecb98cac1de5c44c981fe9bc4ca2eaa0b650231b182536eb374f37a191a09d2}}' - name: task-name value: print-and-return-5 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-and-return-5-driver template: system-container-driver - arguments: @@ -820,7 +860,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-11 outputs: {} @@ -837,8 +877,8 @@ spec: value: '{{workflow.parameters.implementations-cecb98cac1de5c44c981fe9bc4ca2eaa0b650231b182536eb374f37a191a09d2}}' - name: task-name value: print-and-return-6 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-and-return-6-driver template: system-container-driver - arguments: @@ -853,7 +893,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-12 outputs: {} @@ -863,8 +903,8 @@ spec: parameters: - 
name: component value: '{{workflow.parameters.components-comp-condition-11}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-11"},"inputs":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"componentInputParameter":"pipelinechannel--int-0-to-9999-Output"},"pipelinechannel--is-even-or-odd-2-Output":{"componentInputParameter":"pipelinechannel--is-even-or-odd-2-Output"}}},"taskInfo":{"name":"condition-11"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--is-even-or-odd-2-Output''] == ''even''"}}' @@ -874,8 +914,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-11-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-11-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-11-driver.outputs.parameters.condition}}' depends: condition-11-driver.Succeeded @@ -886,8 +926,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-12}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-12"},"inputs":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"componentInputParameter":"pipelinechannel--int-0-to-9999-Output"},"pipelinechannel--is-even-or-odd-2-Output":{"componentInputParameter":"pipelinechannel--is-even-or-odd-2-Output"}}},"taskInfo":{"name":"condition-12"},"triggerPolicy":{"condition":"!(inputs.parameter_values[''pipelinechannel--is-even-or-odd-2-Output''] == ''even'')"}}' @@ -897,8 +937,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-12-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-12-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-12-driver.outputs.parameters.condition}}' depends: condition-12-driver.Succeeded @@ -907,7 +947,7 @@ spec: when: '{{tasks.condition-12-driver.outputs.parameters.condition}} != false' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-branches-10 outputs: {} @@ -917,8 +957,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-branches-10}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-branches-10"},"dependentTasks":["is-even-or-odd-2"],"inputs":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"componentInputParameter":"pipelinechannel--int-0-to-9999-Output"},"pipelinechannel--is-even-or-odd-2-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"is-even-or-odd-2"}}}},"taskInfo":{"name":"condition-branches-10"}}' - name: task-name @@ -928,8 +968,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-branches-10-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-branches-10-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: 
'{{tasks.condition-branches-10-driver.outputs.parameters.condition}}' depends: condition-branches-10-driver.Succeeded @@ -945,8 +985,8 @@ spec: value: '{{workflow.parameters.implementations-9f03385cde2d0dd36c4de829527d6ece0aa8acc96cfc09cd66b2f670fb6c12eb}}' - name: task-name value: is-even-or-odd-2 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: is-even-or-odd-2-driver template: system-container-driver - arguments: @@ -969,8 +1009,8 @@ spec: value: '{{workflow.parameters.implementations-cecb98cac1de5c44c981fe9bc4ca2eaa0b650231b182536eb374f37a191a09d2}}' - name: task-name value: print-and-return-7 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: condition-branches-10.Succeeded name: print-and-return-7-driver template: system-container-driver @@ -986,7 +1026,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-9 outputs: {} @@ -996,8 +1036,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-13}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-13"},"inputs":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"componentInputParameter":"pipelinechannel--int-0-to-9999-Output"},"pipelinechannel--repeat_if_lucky_number":{"componentInputParameter":"pipelinechannel--repeat_if_lucky_number"}}},"taskInfo":{"name":"condition-13"},"triggerPolicy":{"condition":"!(int(inputs.parameter_values[''pipelinechannel--int-0-to-9999-Output'']) \u003c 5000) \u0026\u0026 !(int(inputs.parameter_values[''pipelinechannel--int-0-to-9999-Output'']) @@ -1008,8 +1048,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-13-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-13-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-13-driver.outputs.parameters.condition}}' depends: condition-13-driver.Succeeded @@ -1020,8 +1060,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-8}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-8"},"inputs":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"componentInputParameter":"pipelinechannel--int-0-to-9999-Output"}}},"taskInfo":{"name":"condition-8"},"triggerPolicy":{"condition":"int(inputs.parameter_values[''pipelinechannel--int-0-to-9999-Output'']) \u003c 5000"}}' @@ -1031,8 +1071,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-8-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-8-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-8-driver.outputs.parameters.condition}}' depends: condition-8-driver.Succeeded @@ -1043,8 +1083,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-9}}' - - 
name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-9"},"inputs":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"componentInputParameter":"pipelinechannel--int-0-to-9999-Output"}}},"taskInfo":{"name":"condition-9"},"triggerPolicy":{"condition":"!(int(inputs.parameter_values[''pipelinechannel--int-0-to-9999-Output'']) \u003c 5000) \u0026\u0026 int(inputs.parameter_values[''pipelinechannel--int-0-to-9999-Output'']) @@ -1055,8 +1095,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-9-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-9-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-9-driver.outputs.parameters.condition}}' depends: condition-9-driver.Succeeded @@ -1065,7 +1105,7 @@ spec: when: '{{tasks.condition-9-driver.outputs.parameters.condition}} != false' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-branches-4 outputs: {} @@ -1075,21 +1115,25 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-2}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-2"},"inputs":{"parameters":{"pipelinechannel--add_drumroll":{"componentInputParameter":"pipelinechannel--add_drumroll"},"pipelinechannel--trials-loop-item":{"componentInputParameter":"pipelinechannel--trials-loop-item"}}},"taskInfo":{"name":"condition-2"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--add_drumroll''] == true"}}' - name: task-name value: condition-2 + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: condition-2-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-2-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-2-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-2-driver.outputs.parameters.condition}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: condition-2-driver.Succeeded name: condition-2 template: comp-condition-2 @@ -1098,21 +1142,25 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-branches-4}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-branches-4"},"dependentTasks":["int-0-to-9999"],"inputs":{"parameters":{"pipelinechannel--int-0-to-9999-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"int-0-to-9999"}},"pipelinechannel--repeat_if_lucky_number":{"componentInputParameter":"pipelinechannel--repeat_if_lucky_number"}}},"taskInfo":{"name":"condition-branches-4"}}' - name: task-name value: condition-branches-4 + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: int-0-to-9999.Succeeded name: condition-branches-4-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: 
'{{tasks.condition-branches-4-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-branches-4-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-branches-4-driver.outputs.parameters.condition}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: condition-branches-4-driver.Succeeded name: condition-branches-4 template: comp-condition-branches-4 @@ -1126,8 +1174,10 @@ spec: value: '{{workflow.parameters.implementations-037286a2481e6e06b20c68a503b3734f6d588a2e617f51124ada32d3bfbaebc9}}' - name: task-name value: int-0-to-9999 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: int-0-to-9999-driver template: system-container-driver - arguments: @@ -1137,44 +1187,17 @@ spec: - default: "false" name: cached-decision value: '{{tasks.int-0-to-9999-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: int-0-to-9999-driver.Succeeded name: int-0-to-9999 template: system-container-executor inputs: parameters: - - name: parent-dag-id - metadata: {} - name: comp-for-loop-1 - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-1}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-1"},"inputs":{"parameters":{"pipelinechannel--add_drumroll":{"componentInputParameter":"add_drumroll"},"pipelinechannel--repeat_if_lucky_number":{"componentInputParameter":"repeat_if_lucky_number"},"pipelinechannel--trials":{"componentInputParameter":"trials"}}},"parameterIterator":{"itemInput":"pipelinechannel--trials-loop-item","items":{"inputParameter":"pipelinechannel--trials"}},"taskInfo":{"name":"for-loop-1"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-1 - inputs: - parameters: - - name: parent-dag-id + - name: parent-dag-task-id - name: iteration-index metadata: {} - name: comp-for-loop-1-iteration + name: comp-for-loop-1 outputs: {} - dag: tasks: @@ -1182,26 +1205,28 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-1}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-for-loop-1"},"inputs":{"parameters":{"pipelinechannel--add_drumroll":{"componentInputParameter":"add_drumroll"},"pipelinechannel--repeat_if_lucky_number":{"componentInputParameter":"repeat_if_lucky_number"},"pipelinechannel--trials":{"componentInputParameter":"trials"}}},"parameterIterator":{"itemInput":"pipelinechannel--trials-loop-item","items":{"inputParameter":"pipelinechannel--trials"}},"taskInfo":{"name":"for-loop-1"}}' + - name: task-name + value: for-loop-1 name: iteration-driver template: system-dag-driver - 
arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-1-iteration + template: comp-for-loop-1 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-1-for-loop-1-iterator outputs: {} @@ -1209,8 +1234,8 @@ spec: tasks: - arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: for-loop-1 template: comp-for-loop-1-for-loop-1-iterator - arguments: @@ -1223,8 +1248,8 @@ spec: value: '{{workflow.parameters.implementations-7eb3c92dc3692d5ff7c6a3fee6f5d875746948a93fc517922b5ce5d9c8fb8e41}}' - name: task-name value: print-ints - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: for-loop-1.Succeeded name: print-ints-driver template: system-container-driver @@ -1240,7 +1265,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -1258,8 +1283,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/if_elif_else_with_oneof_parameters.yaml b/test_data/compiled-workflows/if_elif_else_with_oneof_parameters.yaml index d523def9741..fedaaa16849 100644 --- a/test_data/compiled-workflows/if_elif_else_with_oneof_parameters.yaml +++ b/test_data/compiled-workflows/if_elif_else_with_oneof_parameters.yaml @@ -87,8 +87,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -115,6 +115,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -124,13 +137,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -152,6 +169,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -181,6 +206,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: 
metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -191,6 +220,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -230,6 +262,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -255,8 +294,8 @@ spec: value: '{{workflow.parameters.implementations-cecb98cac1de5c44c981fe9bc4ca2eaa0b650231b182536eb374f37a191a09d2}}' - name: task-name value: print-and-return - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-and-return-driver template: system-container-driver - arguments: @@ -271,7 +310,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-2 outputs: {} @@ -288,8 +327,8 @@ spec: value: '{{workflow.parameters.implementations-cecb98cac1de5c44c981fe9bc4ca2eaa0b650231b182536eb374f37a191a09d2}}' - name: task-name value: print-and-return-2 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-and-return-2-driver template: system-container-driver - arguments: @@ -304,7 +343,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-3 outputs: {} @@ -320,8 +359,8 @@ spec: value: '{{workflow.parameters.implementations-8db1b3d3b7e6823165c17afdab16d298bdef05326ec345e8eded255ee61b4c30}}' - name: task-name value: special-print-and-return - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: special-print-and-return-driver template: system-container-driver - arguments: @@ -336,7 +375,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-4 outputs: {} @@ -352,8 +391,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -364,8 +403,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -378,6 +417,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -387,6 +439,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ 
-396,8 +452,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -406,9 +462,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -417,14 +473,22 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-2}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-2"},"inputs":{"parameters":{"pipelinechannel--flip-three-sided-die-Output":{"componentInputParameter":"pipelinechannel--flip-three-sided-die-Output"}}},"taskInfo":{"name":"condition-2"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-three-sided-die-Output''] == ''heads''"}}' @@ -434,8 +498,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-2-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-2-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-2-driver.outputs.parameters.condition}}' depends: condition-2-driver.Succeeded @@ -446,8 +510,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-3}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-3"},"inputs":{"parameters":{"pipelinechannel--flip-three-sided-die-Output":{"componentInputParameter":"pipelinechannel--flip-three-sided-die-Output"}}},"taskInfo":{"name":"condition-3"},"triggerPolicy":{"condition":"!(inputs.parameter_values[''pipelinechannel--flip-three-sided-die-Output''] == ''heads'') \u0026\u0026 inputs.parameter_values[''pipelinechannel--flip-three-sided-die-Output''] @@ -458,8 +522,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-3-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-3-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-3-driver.outputs.parameters.condition}}' depends: condition-3-driver.Succeeded @@ -470,8 +534,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-4}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-4"},"inputs":{"parameters":{"pipelinechannel--flip-three-sided-die-Output":{"componentInputParameter":"pipelinechannel--flip-three-sided-die-Output"}}},"taskInfo":{"name":"condition-4"},"triggerPolicy":{"condition":"!(inputs.parameter_values[''pipelinechannel--flip-three-sided-die-Output''] == ''heads'') \u0026\u0026 
!(inputs.parameter_values[''pipelinechannel--flip-three-sided-die-Output''] @@ -482,8 +546,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-4-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-4-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-4-driver.outputs.parameters.condition}}' depends: condition-4-driver.Succeeded @@ -492,7 +556,7 @@ spec: when: '{{tasks.condition-4-driver.outputs.parameters.condition}} != false' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-branches-1 outputs: {} @@ -502,8 +566,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-branches-1}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-branches-1"},"dependentTasks":["flip-three-sided-die"],"inputs":{"parameters":{"pipelinechannel--flip-three-sided-die-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-three-sided-die"}}}},"taskInfo":{"name":"condition-branches-1"}}' - name: task-name @@ -513,8 +577,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-branches-1-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-branches-1-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-branches-1-driver.outputs.parameters.condition}}' depends: condition-branches-1-driver.Succeeded @@ -530,8 +594,8 @@ spec: value: '{{workflow.parameters.implementations-4c0f0fdecdf01e223e03a16bfbb999078ae2981e08cc552cfc2460f569d6ff5b}}' - name: task-name value: flip-three-sided-die - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: flip-three-sided-die-driver template: system-container-driver - arguments: @@ -546,7 +610,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-roll-die-pipeline outputs: {} @@ -562,8 +626,8 @@ spec: value: '{{workflow.parameters.implementations-cecb98cac1de5c44c981fe9bc4ca2eaa0b650231b182536eb374f37a191a09d2}}' - name: task-name value: print-and-return - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: roll-die-pipeline.Succeeded name: print-and-return-driver template: system-container-driver @@ -581,8 +645,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-roll-die-pipeline}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-roll-die-pipeline"},"taskInfo":{"name":"roll-die-pipeline"}}' - name: task-name @@ -591,8 +655,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.roll-die-pipeline-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: 
'{{tasks.roll-die-pipeline-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.roll-die-pipeline-driver.outputs.parameters.condition}}' depends: roll-die-pipeline-driver.Succeeded @@ -600,7 +664,7 @@ spec: template: comp-roll-die-pipeline inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -618,8 +682,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/if_else_with_oneof_artifacts.yaml b/test_data/compiled-workflows/if_else_with_oneof_artifacts.yaml index cb5bfe87126..1c1ee09efd8 100644 --- a/test_data/compiled-workflows/if_else_with_oneof_artifacts.yaml +++ b/test_data/compiled-workflows/if_else_with_oneof_artifacts.yaml @@ -79,8 +79,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -107,6 +107,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -116,13 +129,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -144,6 +161,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -173,6 +198,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -183,6 +212,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -222,6 +254,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -246,8 +285,8 @@ spec: value: '{{workflow.parameters.implementations-548a24cf68d38c6b9cd5ffa0cf3bff0ebc8c28e453df033e43d174b1b0799e44}}' - name: task-name value: param-to-artifact - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: param-to-artifact-driver template: system-container-driver - arguments: @@ -262,7 +301,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-2 outputs: {} 
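# Every condition-N branch in these workflows is gated the same way: the
# dag driver evaluates the task's triggerPolicy and writes "true"/"false"
# to /tmp/outputs/condition, and the branch's DAG task only runs under
# when: ... != false. Reduced to the bare Argo mechanism (template names
# are illustrative, not from this diff):
apiVersion: argoproj.io/v1alpha1
kind: Workflow
metadata:
  generateName: condition-gate-   # hypothetical
spec:
  entrypoint: gated-branch
  templates:
    - name: branch-driver         # stands in for system-dag-driver
      container:
        image: ghcr.io/kubeflow/kfp-driver:latest
        command: [driver]
      outputs:
        parameters:
          - name: condition
            valueFrom:
              default: "true"               # a missing file counts as pass
              path: /tmp/outputs/condition  # "true"/"false" from the driver
    - name: branch-body
      container:
        image: alpine:3
        command: [echo, branch taken]
    - name: gated-branch
      dag:
        tasks:
          - name: driver
            template: branch-driver
          - name: body
            template: branch-body
            depends: driver.Succeeded
            when: '{{tasks.driver.outputs.parameters.condition}} != false'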
@@ -278,8 +317,8 @@ spec: value: '{{workflow.parameters.implementations-548a24cf68d38c6b9cd5ffa0cf3bff0ebc8c28e453df033e43d174b1b0799e44}}' - name: task-name value: param-to-artifact-2 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: param-to-artifact-2-driver template: system-container-driver - arguments: @@ -294,7 +333,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-3 outputs: {} @@ -310,8 +349,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -322,8 +361,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -336,6 +375,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -345,6 +397,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -354,8 +410,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -364,9 +420,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -375,14 +431,22 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-2}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-2"},"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"componentInputParameter":"pipelinechannel--flip-coin-Output"}}},"taskInfo":{"name":"condition-2"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-coin-Output''] == ''heads''"}}' @@ -392,8 +456,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-2-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-2-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-2-driver.outputs.parameters.condition}}' depends: condition-2-driver.Succeeded @@ -404,8 +468,8 @@ spec: parameters: - name: component 
value: '{{workflow.parameters.components-comp-condition-3}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-3"},"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"componentInputParameter":"pipelinechannel--flip-coin-Output"}}},"taskInfo":{"name":"condition-3"},"triggerPolicy":{"condition":"!(inputs.parameter_values[''pipelinechannel--flip-coin-Output''] == ''heads'')"}}' @@ -415,8 +479,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-3-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-3-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-3-driver.outputs.parameters.condition}}' depends: condition-3-driver.Succeeded @@ -425,7 +489,7 @@ spec: when: '{{tasks.condition-3-driver.outputs.parameters.condition}} != false' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-branches-1 outputs: {} @@ -435,8 +499,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-branches-1}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-branches-1"},"dependentTasks":["flip-coin"],"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin"}}}},"taskInfo":{"name":"condition-branches-1"}}' - name: task-name @@ -446,8 +510,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-branches-1-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-branches-1-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-branches-1-driver.outputs.parameters.condition}}' depends: condition-branches-1-driver.Succeeded @@ -463,8 +527,8 @@ spec: value: '{{workflow.parameters.implementations-b7bc282093d4c682a88eee9264eed07197d2e4dc5276c37ed07b353ff7e961aa}}' - name: task-name value: flip-coin - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: flip-coin-driver template: system-container-driver - arguments: @@ -487,8 +551,8 @@ spec: value: '{{workflow.parameters.implementations-f4e8ee10b02bd6b6b90fdf2bb11d355cbb8b814138f9ef2935d245797f7bf430}}' - name: task-name value: print-artifact - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: condition-branches-1.Succeeded name: print-artifact-driver template: system-container-driver @@ -504,7 +568,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-flip-coin-pipeline outputs: {} @@ -514,8 +578,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-flip-coin-pipeline}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: 
'{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-flip-coin-pipeline"},"taskInfo":{"name":"flip-coin-pipeline"}}' - name: task-name @@ -524,8 +588,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.flip-coin-pipeline-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.flip-coin-pipeline-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.flip-coin-pipeline-driver.outputs.parameters.condition}}' depends: flip-coin-pipeline-driver.Succeeded @@ -541,8 +605,8 @@ spec: value: '{{workflow.parameters.implementations-f4e8ee10b02bd6b6b90fdf2bb11d355cbb8b814138f9ef2935d245797f7bf430}}' - name: task-name value: print-artifact - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: flip-coin-pipeline.Succeeded name: print-artifact-driver template: system-container-driver @@ -558,7 +622,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -576,8 +640,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/if_else_with_oneof_parameters.yaml b/test_data/compiled-workflows/if_else_with_oneof_parameters.yaml index 6c3acaaa30a..27ac859f16c 100644 --- a/test_data/compiled-workflows/if_else_with_oneof_parameters.yaml +++ b/test_data/compiled-workflows/if_else_with_oneof_parameters.yaml @@ -65,8 +65,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -93,6 +93,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -102,13 +115,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -130,6 +147,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -159,6 +184,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -169,6 +198,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -208,6 +240,13 @@ spec: volumes: - emptyDir: {} name: 
kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -233,8 +272,8 @@ spec: value: '{{workflow.parameters.implementations-cecb98cac1de5c44c981fe9bc4ca2eaa0b650231b182536eb374f37a191a09d2}}' - name: task-name value: print-and-return - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-and-return-driver template: system-container-driver - arguments: @@ -249,7 +288,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-2 outputs: {} @@ -266,8 +305,8 @@ spec: value: '{{workflow.parameters.implementations-cecb98cac1de5c44c981fe9bc4ca2eaa0b650231b182536eb374f37a191a09d2}}' - name: task-name value: print-and-return-2 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-and-return-2-driver template: system-container-driver - arguments: @@ -282,7 +321,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-3 outputs: {} @@ -298,8 +337,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -310,8 +349,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -324,6 +363,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -333,6 +385,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -342,8 +398,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -352,9 +408,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -363,14 +419,22 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-2}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: 
'{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-2"},"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"componentInputParameter":"pipelinechannel--flip-coin-Output"}}},"taskInfo":{"name":"condition-2"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-coin-Output''] == ''heads''"}}' @@ -380,8 +444,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-2-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-2-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-2-driver.outputs.parameters.condition}}' depends: condition-2-driver.Succeeded @@ -392,8 +456,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-3}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-3"},"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"componentInputParameter":"pipelinechannel--flip-coin-Output"}}},"taskInfo":{"name":"condition-3"},"triggerPolicy":{"condition":"!(inputs.parameter_values[''pipelinechannel--flip-coin-Output''] == ''heads'')"}}' @@ -403,8 +467,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-3-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-3-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-3-driver.outputs.parameters.condition}}' depends: condition-3-driver.Succeeded @@ -413,7 +477,7 @@ spec: when: '{{tasks.condition-3-driver.outputs.parameters.condition}} != false' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-branches-1 outputs: {} @@ -423,8 +487,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-branches-1}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-branches-1"},"dependentTasks":["flip-coin"],"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin"}}}},"taskInfo":{"name":"condition-branches-1"}}' - name: task-name @@ -434,8 +498,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-branches-1-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-branches-1-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-branches-1-driver.outputs.parameters.condition}}' depends: condition-branches-1-driver.Succeeded @@ -451,8 +515,8 @@ spec: value: '{{workflow.parameters.implementations-b7bc282093d4c682a88eee9264eed07197d2e4dc5276c37ed07b353ff7e961aa}}' - name: task-name value: flip-coin - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: flip-coin-driver template: system-container-driver - arguments: @@ -475,8 +539,8 @@ spec: value: 
'{{workflow.parameters.implementations-cecb98cac1de5c44c981fe9bc4ca2eaa0b650231b182536eb374f37a191a09d2}}' - name: task-name value: print-and-return-3 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: condition-branches-1.Succeeded name: print-and-return-3-driver template: system-container-driver @@ -492,7 +556,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -510,8 +574,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/input_artifact.yaml b/test_data/compiled-workflows/input_artifact.yaml index 89d16286c2b..4fb87f38588 100644 --- a/test_data/compiled-workflows/input_artifact.yaml +++ b/test_data/compiled-workflows/input_artifact.yaml @@ -41,8 +41,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -69,6 +69,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -78,13 +91,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -106,6 +123,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -135,6 +160,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -145,6 +174,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -184,6 +216,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -208,8 +247,8 @@ spec: value: '{{workflow.parameters.implementations-c0e9d1cc85ce81739c3677ec2338cb29b8bad1837b123a910f0aa723a5fde46c}}' - name: task-name value: input-artifact - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: input-artifact-driver template: system-container-driver - arguments: @@ -224,7 +263,7 @@ spec: template: 
system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -240,8 +279,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -252,8 +291,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -266,6 +305,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -275,6 +327,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -284,8 +340,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -294,9 +350,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -305,6 +361,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -319,8 +383,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/iris_pipeline_compiled.yaml b/test_data/compiled-workflows/iris_pipeline_compiled.yaml index 00bb6e0f83a..137ab974b64 100644 --- a/test_data/compiled-workflows/iris_pipeline_compiled.yaml +++ b/test_data/compiled-workflows/iris_pipeline_compiled.yaml @@ -94,8 +94,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -122,6 +122,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -131,13 +144,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: 
parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -159,6 +176,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -188,6 +213,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -198,6 +227,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -237,6 +269,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -261,8 +300,8 @@ spec: value: '{{workflow.parameters.implementations-46b83eacfc26cd94d4579de62f52c8eb251e7093888e545381b90fcf7e1b26c8}}' - name: task-name value: create-dataset - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: create-dataset-driver template: system-container-driver - arguments: @@ -285,8 +324,8 @@ spec: value: '{{workflow.parameters.implementations-f666f111ab69031b13ddbe7fb9b938dc0c077c50cf3b7b5472f4d97d36627cb0}}' - name: task-name value: normalize-dataset - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: create-dataset.Succeeded name: normalize-dataset-driver template: system-container-driver @@ -310,8 +349,8 @@ spec: value: '{{workflow.parameters.implementations-0aef93036ec0e04e8c952f416203a009f7b67c1736c73e0a57cf513297ac4add}}' - name: task-name value: train-model - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: normalize-dataset.Succeeded name: train-model-driver template: system-container-driver @@ -327,7 +366,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -343,8 +382,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -355,8 +394,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -369,6 +408,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -378,6 +430,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: 
/var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -387,8 +443,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -397,9 +453,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -408,6 +464,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -422,8 +486,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/lightweight_python_functions_pipeline.yaml b/test_data/compiled-workflows/lightweight_python_functions_pipeline.yaml index 51b70783254..2512e2d0c12 100644 --- a/test_data/compiled-workflows/lightweight_python_functions_pipeline.yaml +++ b/test_data/compiled-workflows/lightweight_python_functions_pipeline.yaml @@ -96,8 +96,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -124,6 +124,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -133,13 +146,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -161,6 +178,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -190,6 +215,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -200,6 +229,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -239,6 +271,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -263,8 +302,8 @@ spec: value: 
'{{workflow.parameters.implementations-ea150e85c8a1d73e8fa5cc90cd210a9ae6d20007a7071b293f971fafdecbe6f4}}' - name: task-name value: preprocess - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: preprocess-driver template: system-container-driver - arguments: @@ -287,8 +326,8 @@ spec: value: '{{workflow.parameters.implementations-b36bc37867b11dd27d7e36a6abd032012300f22cad3a1d2c85ed1f7d868a4ae1}}' - name: task-name value: train - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: preprocess.Succeeded name: train-driver template: system-container-driver @@ -304,7 +343,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -320,8 +359,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -332,8 +371,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -346,6 +385,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -355,6 +407,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -364,8 +420,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -374,9 +430,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -385,6 +441,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -399,8 +463,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/lightweight_python_functions_with_outputs.yaml b/test_data/compiled-workflows/lightweight_python_functions_with_outputs.yaml index 69ac40a7575..140b248f958 100644 --- a/test_data/compiled-workflows/lightweight_python_functions_with_outputs.yaml +++ b/test_data/compiled-workflows/lightweight_python_functions_with_outputs.yaml @@ 
-88,8 +88,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -116,6 +116,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -125,13 +138,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -153,6 +170,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -182,6 +207,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -192,6 +221,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -231,6 +263,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -255,8 +294,8 @@ spec: value: '{{workflow.parameters.implementations-64327f64eaa6ee6d1d3875fb132b2b0220e21b5ab70d2e725bd77cefe61af74a}}' - name: task-name value: add-numbers - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: add-numbers-driver template: system-container-driver - arguments: @@ -279,8 +318,8 @@ spec: value: '{{workflow.parameters.implementations-22305325c661206b4123f78ea46d0c5195ad2ec0f2cb1c12e7d3796adfea3272}}' - name: task-name value: concat-message - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: concat-message-driver template: system-container-driver - arguments: @@ -303,8 +342,8 @@ spec: value: '{{workflow.parameters.implementations-8a22c1e65c445da5e19295ad1c0852d6baa362cde8d17969d80f385ffb3f744b}}' - name: task-name value: output-artifact - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: add-numbers.Succeeded && concat-message.Succeeded name: output-artifact-driver template: system-container-driver @@ -328,8 +367,8 @@ spec: value: '{{workflow.parameters.implementations-8b7c37a05dbc36da22206bd274fccfa32b8ca2751e413c2d48e41d700d278796}}' - name: task-name value: output-named-tuple - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: 
output-artifact.Succeeded name: output-named-tuple-driver template: system-container-driver @@ -345,7 +384,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -361,8 +400,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -373,8 +412,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -387,6 +426,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -396,6 +448,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -405,8 +461,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -415,9 +471,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -426,6 +482,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -440,8 +504,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/log_streaming_compiled.yaml b/test_data/compiled-workflows/log_streaming_compiled.yaml index 9cd30149337..8f915a150d8 100644 --- a/test_data/compiled-workflows/log_streaming_compiled.yaml +++ b/test_data/compiled-workflows/log_streaming_compiled.yaml @@ -45,8 +45,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -73,6 +73,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -82,13 +95,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true 
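
Beyond the rename, every driver and launcher container in these fixtures gains the same two additions: pod-identity environment variables (KFP_POD_NAME, KFP_POD_UID, NAMESPACE) populated via the downward API, and a kfp-launcher-token volume mounted read-only at /var/run/secrets/kfp, projected from a short-lived service-account token bound to the pipelines.kubeflow.org audience. Here is a standalone pod sketch of the pattern; the pod name token-projection-demo and the alpine image are placeholders, while the audience, expiry, and mount path match the fixtures:

apiVersion: v1
kind: Pod
metadata:
  name: token-projection-demo
spec:
  containers:
  - name: main
    image: alpine:3.20
    command: [sh, -c]
    # The kubelet refreshes the projected token before it expires, so
    # consumers should re-read the file rather than cache its contents.
    args: ['cat /var/run/secrets/kfp/token && sleep 3600']
    env:
    - name: KFP_POD_NAME
      valueFrom:
        fieldRef:
          fieldPath: metadata.name
    - name: KFP_POD_UID
      valueFrom:
        fieldRef:
          fieldPath: metadata.uid
    - name: NAMESPACE
      valueFrom:
        fieldRef:
          fieldPath: metadata.namespace
    volumeMounts:
    - mountPath: /var/run/secrets/kfp
      name: kfp-launcher-token
      readOnly: true
  volumes:
  - name: kfp-launcher-token
    projected:
      sources:
      - serviceAccountToken:
          # audience-bound: the token is only valid when presented to a
          # verifier expecting the pipelines.kubeflow.org audience
          audience: pipelines.kubeflow.org
          expirationSeconds: 7200
          path: token

Presumably the driver and launcher present this token when calling the KFP API server, which can validate it for that specific audience (for example via a Kubernetes TokenReview), while the pod-identity variables let the server attribute the call to the exact pod that made it. Because the kubelet rotates projected tokens automatically, the 7200-second expiry stays valid for the life of the pod without any refresh logic in the containers.
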
inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -110,6 +127,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -139,6 +164,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -149,6 +178,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -188,6 +220,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -212,8 +251,8 @@ spec: value: '{{workflow.parameters.implementations-83903e2b86109679dccb14a8d66f84f860b49eb946f971670d98e0dc63e93cf6}}' - name: task-name value: print-message - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-message-driver template: system-container-driver - arguments: @@ -228,7 +267,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -244,8 +283,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -256,8 +295,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -270,6 +309,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -279,6 +331,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -288,8 +344,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -298,9 +354,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -309,6 +365,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: 
pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -323,8 +387,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/long-running.yaml b/test_data/compiled-workflows/long-running.yaml index e5378760109..d12d29e9083 100644 --- a/test_data/compiled-workflows/long-running.yaml +++ b/test_data/compiled-workflows/long-running.yaml @@ -32,8 +32,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -60,6 +60,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -69,13 +82,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -97,6 +114,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -126,6 +151,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -136,6 +165,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -175,6 +207,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -199,8 +238,8 @@ spec: value: '{{workflow.parameters.implementations-50416a9fa23210403fe0e780d1dbd6667e4f73dfed854d15c7319fdd6ce87af8}}' - name: task-name value: wait-op - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: wait-op-driver template: system-container-driver - arguments: @@ -223,8 +262,8 @@ spec: value: '{{workflow.parameters.implementations-50416a9fa23210403fe0e780d1dbd6667e4f73dfed854d15c7319fdd6ce87af8}}' - name: task-name value: wait-op-2 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: wait-op.Succeeded name: wait-op-2-driver template: system-container-driver @@ -240,7 +279,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: 
root outputs: {} @@ -256,8 +295,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -268,8 +307,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -282,6 +321,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -291,6 +343,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -300,8 +356,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -310,9 +366,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -321,6 +377,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -335,8 +399,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/loop_consume_upstream.yaml b/test_data/compiled-workflows/loop_consume_upstream.yaml index c10b9e84e7f..8b4beef6b52 100644 --- a/test_data/compiled-workflows/loop_consume_upstream.yaml +++ b/test_data/compiled-workflows/loop_consume_upstream.yaml @@ -89,8 +89,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -117,6 +117,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -126,13 +139,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -154,6 +171,14 @@ spec: valueFrom: default: "true" 
path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -183,6 +208,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -193,6 +222,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -232,6 +264,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -257,8 +296,10 @@ spec: value: '{{workflow.parameters.implementations-e95f5ecfe88450a38e0fd6e153fc9259d010d9ab920e3fe0dd141bf675c413df}}' - name: task-name value: create-file - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: create-file-driver template: system-container-driver - arguments: @@ -268,6 +309,8 @@ spec: - default: "false" name: cached-decision value: '{{tasks.create-file-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: create-file-driver.Succeeded name: create-file template: system-container-executor @@ -282,8 +325,10 @@ spec: value: '{{workflow.parameters.implementations-f59c851c3c0a1e03b3e2065384eb3c4665ce5f234998fed18cb6afb34ce005a7}}' - name: task-name value: read-file - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: create-file.Succeeded name: read-file-driver template: system-container-driver @@ -294,12 +339,15 @@ spec: - default: "false" name: cached-decision value: '{{tasks.read-file-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: read-file-driver.Succeeded name: read-file template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id + - name: iteration-index metadata: {} name: comp-for-loop-1 outputs: {} @@ -315,8 +363,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -327,8 +375,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -341,6 +389,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: 
metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -350,6 +411,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -359,8 +424,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -369,9 +434,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -380,62 +445,42 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-1}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-1"},"dependentTasks":["split-input"],"inputs":{"parameters":{"pipelinechannel--split-input-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"split-input"}}}},"parameterIterator":{"itemInput":"pipelinechannel--split-input-Output-loop-item","items":{"inputParameter":"pipelinechannel--split-input-Output"}},"taskInfo":{"name":"for-loop-1"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-1 - inputs: - parameters: - - name: parent-dag-id - - name: iteration-index - metadata: {} - name: comp-for-loop-1-iteration - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-1}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-for-loop-1"},"dependentTasks":["split-input"],"inputs":{"parameters":{"pipelinechannel--split-input-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"split-input"}}}},"parameterIterator":{"itemInput":"pipelinechannel--split-input-Output-loop-item","items":{"inputParameter":"pipelinechannel--split-input-Output"}},"taskInfo":{"name":"for-loop-1"}}' + - name: task-name + value: for-loop-1 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-1-iteration + template: comp-for-loop-1 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} 
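
Note on the loop change in the loop_consume_upstream.yaml hunks above: the previous compilation wrapped each iteration in an extra comp-for-loop-1-iteration template with its own per-item iteration-item-driver; the new compilation drops that layer entirely. The iterator now runs a single iteration-driver, fans out with withSequence directly over comp-for-loop-1, and passes an explicit iteration-index ('{{item}}') that the loop body threads into each of its container drivers and executors. A reduced sketch of the new iterator shape, reusing the system-dag-driver template sketched earlier; loop-iterator and loop-body are illustrative names:

  - name: loop-iterator
    inputs:
      parameters:
      - name: parent-dag-task-id
    dag:
      tasks:
      - name: iteration-driver        # resolves the items, emits iteration-count
        template: system-dag-driver
        arguments:
          parameters:
          - name: parent-dag-task-id
            value: '{{inputs.parameters.parent-dag-task-id}}'
      - name: iteration-iterations    # fans out straight over the loop body
        depends: iteration-driver.Succeeded
        template: loop-body
        arguments:
          parameters:
          - name: parent-dag-task-id
            value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}'
          - name: iteration-index     # new: each iteration receives its own index
            value: '{{item}}'
        withSequence:
          count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}'
  - name: loop-body
    inputs:
      parameters:
      - name: parent-dag-task-id
      - name: iteration-index         # threaded to every driver/executor inside
    container:
      image: alpine:3.20
      command: [sh, -c]
      args: ['echo "iteration {{inputs.parameters.iteration-index}}"']
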
name: comp-for-loop-1-for-loop-1-iterator outputs: {} @@ -443,8 +488,8 @@ spec: tasks: - arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: split-input.Succeeded name: for-loop-1 template: comp-for-loop-1-for-loop-1-iterator @@ -459,8 +504,8 @@ spec: value: '{{workflow.parameters.implementations-b4e9d76b938751b0b1edf9cce7cc5817c0b9c54d34a1e06c3a226a46cc2c3f1e}}' - name: task-name value: print-input - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: split-input.Succeeded name: print-input-driver template: system-container-driver @@ -485,8 +530,8 @@ spec: value: '{{workflow.parameters.implementations-fc5dd8cf7a18cc02226309bb936fb376a98741769b2faba10d1bfc9960de8802}}' - name: task-name value: split-input - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: split-input-driver template: system-container-driver - arguments: @@ -501,7 +546,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -519,8 +564,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/metrics_visualization_v2.yaml b/test_data/compiled-workflows/metrics_visualization_v2.yaml index f374e5e8b10..c2857f8f020 100644 --- a/test_data/compiled-workflows/metrics_visualization_v2.yaml +++ b/test_data/compiled-workflows/metrics_visualization_v2.yaml @@ -133,8 +133,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -161,6 +161,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -170,13 +183,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -198,6 +215,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -227,6 +252,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -237,6 +266,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: 
kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -276,6 +308,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -300,8 +339,8 @@ spec: value: '{{workflow.parameters.implementations-f3f63f614ffceea8307ebdcd5f7c7a8ba2bf88301e6b6f4ef2771a43a1ac2e55}}' - name: task-name value: digit-classification - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: digit-classification-driver template: system-container-driver - arguments: @@ -324,8 +363,8 @@ spec: value: '{{workflow.parameters.implementations-b3c7dd98d963c8b8e8f52c8c62b593f94e99c900733c383027ffba46765ac7fb}}' - name: task-name value: html-visualization - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: html-visualization-driver template: system-container-driver - arguments: @@ -348,8 +387,8 @@ spec: value: '{{workflow.parameters.implementations-9f3641fea11e44f30b9e256d336098510b78bd0f423c003ee0ce74da34c7fdc1}}' - name: task-name value: iris-sgdclassifier - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: iris-sgdclassifier-driver template: system-container-driver - arguments: @@ -372,8 +411,8 @@ spec: value: '{{workflow.parameters.implementations-b4e81501f4cd8d1ff945c7fd6ae18a8abb3f66daa3fe9e5d75df12714a185308}}' - name: task-name value: markdown-visualization - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: markdown-visualization-driver template: system-container-driver - arguments: @@ -396,8 +435,8 @@ spec: value: '{{workflow.parameters.implementations-58d11750818090dbfe31db33200e5a2dfc0b5b9d742ccdd90fea504248ab4dba}}' - name: task-name value: wine-classification - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: wine-classification-driver template: system-container-driver - arguments: @@ -412,7 +451,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -428,8 +467,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -440,8 +479,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -454,6 +493,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: 
ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -463,6 +515,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -472,8 +528,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -482,9 +538,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -493,6 +549,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -507,8 +571,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/missing_kubernetes_optional_inputs.yaml b/test_data/compiled-workflows/missing_kubernetes_optional_inputs.yaml index 74e6976c9af..b14d972d414 100644 --- a/test_data/compiled-workflows/missing_kubernetes_optional_inputs.yaml +++ b/test_data/compiled-workflows/missing_kubernetes_optional_inputs.yaml @@ -44,8 +44,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -72,6 +72,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -81,13 +94,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -109,6 +126,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -138,6 +163,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -148,6 +177,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -187,6 +219,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch 
- emptyDir: {} @@ -212,8 +251,8 @@ spec: value: '{{workflow.parameters.implementations-96322856eafd7d43227888c8781b0302ec66e8a5befa15af46a259fdf0571c35}}' - name: task-name value: log-message - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: kubernetes-config value: '{{workflow.parameters.kubernetes-comp-log-message}}' name: log-message-driver @@ -230,7 +269,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -246,8 +285,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -258,8 +297,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -272,6 +311,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -281,6 +333,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -290,8 +346,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -300,9 +356,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -311,6 +367,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -325,8 +389,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/mixed_parameters.yaml b/test_data/compiled-workflows/mixed_parameters.yaml index 1f0eaba674d..3651098afa5 100644 --- a/test_data/compiled-workflows/mixed_parameters.yaml +++ b/test_data/compiled-workflows/mixed_parameters.yaml @@ -58,8 +58,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -86,6 +86,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + 
valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -95,13 +108,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -123,6 +140,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -152,6 +177,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -162,6 +191,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -201,6 +233,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -225,8 +264,8 @@ spec: value: '{{workflow.parameters.implementations-58716d11d7152c994e16a93768ef172bce97a8d239a026d258c0ef81751e3168}}' - name: task-name value: core-comp - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: core-comp-driver template: system-container-driver - arguments: @@ -241,7 +280,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-core outputs: {} @@ -257,8 +296,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -269,8 +308,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -283,6 +322,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -292,6 +344,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -301,8 +357,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -311,9 +367,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + 
- name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -322,14 +378,22 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: parameters: - name: component value: '{{workflow.parameters.components-comp-core}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"cachingOptions":{},"componentRef":{"name":"comp-core"},"taskInfo":{"name":"core"}}' - name: task-name @@ -338,8 +402,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.core-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.core-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.core-driver.outputs.parameters.condition}}' depends: core-driver.Succeeded @@ -347,7 +411,7 @@ spec: template: comp-core inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-mantle outputs: {} @@ -363,8 +427,8 @@ spec: value: '{{workflow.parameters.implementations-bada9af0362ed45ffb9dac372e0475396c75331f2b09988c0122ea4131e18462}}' - name: task-name value: crust-comp - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: mantle.Succeeded name: crust-comp-driver template: system-container-driver @@ -382,8 +446,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-mantle}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"cachingOptions":{},"componentRef":{"name":"comp-mantle"},"taskInfo":{"name":"mantle"}}' - name: task-name @@ -392,8 +456,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.mantle-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.mantle-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.mantle-driver.outputs.parameters.condition}}' depends: mantle-driver.Succeeded @@ -401,7 +465,7 @@ spec: template: comp-mantle inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -419,8 +483,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/modelcar.yaml b/test_data/compiled-workflows/modelcar.yaml index 212ff064f02..fbe1550344c 100644 --- a/test_data/compiled-workflows/modelcar.yaml +++ b/test_data/compiled-workflows/modelcar.yaml @@ -66,8 +66,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -94,6 +94,19 @@ spec: - "" 
command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -103,13 +116,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -131,6 +148,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -160,6 +185,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -170,6 +199,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -209,6 +241,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -225,26 +264,20 @@ spec: args: - --executor_type - importer - - --task_spec - - '{{inputs.parameters.task}}' - - --component_spec - - '{{inputs.parameters.component}}' + - --task_name + - '{{inputs.parameters.task-name}}' - --importer_spec - '{{inputs.parameters.importer}}' - --pipeline_name - pipeline-with-modelcar-model - --run_id - '{{workflow.uid}}' - - --parent_dag_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --pod_name - $(KFP_POD_NAME) - --pod_uid - $(KFP_POD_UID) - - --mlmd_server_address - - $(METADATA_GRPC_SERVICE_HOST) - - --mlmd_server_port - - $(METADATA_GRPC_SERVICE_PORT) command: - launcher-v2 env: @@ -256,6 +289,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -269,15 +306,26 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - - name: task - - name: component + - name: task-name - name: importer - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: system-importer outputs: {} + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -290,8 +338,8 @@ spec: value: '{{workflow.parameters.implementations-4f60483a1bee7f772ee3001cf05964083d2f9641615161626b6146636da04fe0}}' - name: task-name value: build-model-car - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: build-model-car-driver template: system-container-driver - arguments: @@ -314,8 +362,8 @@ spec: value: 
'{{workflow.parameters.implementations-fc08120adea24f34a0c74fbd8e185e63caa9eca74abf250b23e4ef9c8a3d6878}}' - name: task-name value: get-model-files-list - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: importer.Succeeded name: get-model-files-list-driver template: system-container-driver @@ -331,19 +379,17 @@ spec: template: system-container-executor - arguments: parameters: - - name: task - value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-importer"},"inputs":{"parameters":{"uri":{"componentInputParameter":"model_uri"}}},"taskInfo":{"name":"importer"}}' - - name: component - value: '{{workflow.parameters.components-comp-importer}}' + - name: task-name + value: importer - name: importer value: '{{workflow.parameters.implementations-comp-importer}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: importer template: system-importer inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -359,8 +405,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -371,8 +417,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -385,6 +431,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -394,6 +453,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -403,8 +466,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -413,9 +476,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -424,6 +487,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -438,8 +509,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/multiple_artifacts_namedtuple.yaml 
b/test_data/compiled-workflows/multiple_artifacts_namedtuple.yaml index 3c908b0dc8c..bec6d3109c3 100644 --- a/test_data/compiled-workflows/multiple_artifacts_namedtuple.yaml +++ b/test_data/compiled-workflows/multiple_artifacts_namedtuple.yaml @@ -62,8 +62,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -90,6 +90,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -99,13 +112,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -127,6 +144,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -156,6 +181,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -166,6 +195,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -205,6 +237,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -229,8 +268,8 @@ spec: value: '{{workflow.parameters.implementations-5502b96ca3f6aca433522fdb2d6365445dee6a291a29d192a3665429bcba4eca}}' - name: task-name value: core-comp - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: core-comp-driver template: system-container-driver - arguments: @@ -245,7 +284,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-core outputs: {} @@ -261,8 +300,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -273,8 +312,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -287,6 +326,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + 
fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -296,6 +348,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -305,8 +361,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -315,9 +371,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -326,14 +382,22 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: parameters: - name: component value: '{{workflow.parameters.components-comp-core}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"cachingOptions":{},"componentRef":{"name":"comp-core"},"taskInfo":{"name":"core"}}' - name: task-name @@ -342,8 +406,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.core-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.core-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.core-driver.outputs.parameters.condition}}' depends: core-driver.Succeeded @@ -351,7 +415,7 @@ spec: template: comp-core inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-mantle outputs: {} @@ -367,8 +431,8 @@ spec: value: '{{workflow.parameters.implementations-41d6f43a77336abfc42373262a0d3f817036dea59881c80ac128ebff19d9ac06}}' - name: task-name value: crust-comp - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: mantle.Succeeded name: crust-comp-driver template: system-container-driver @@ -386,8 +450,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-mantle}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"cachingOptions":{},"componentRef":{"name":"comp-mantle"},"taskInfo":{"name":"mantle"}}' - name: task-name @@ -396,8 +460,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.mantle-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.mantle-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.mantle-driver.outputs.parameters.condition}}' depends: mantle-driver.Succeeded @@ -405,7 +469,7 @@ spec: template: comp-mantle inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -423,8 +487,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + 
value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/multiple_parameters_namedtuple.yaml b/test_data/compiled-workflows/multiple_parameters_namedtuple.yaml index 6e86a053905..8441eedafbc 100644 --- a/test_data/compiled-workflows/multiple_parameters_namedtuple.yaml +++ b/test_data/compiled-workflows/multiple_parameters_namedtuple.yaml @@ -61,8 +61,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -89,6 +89,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -98,13 +111,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -126,6 +143,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -155,6 +180,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -165,6 +194,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -204,6 +236,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -228,8 +267,8 @@ spec: value: '{{workflow.parameters.implementations-1ebf161b377e2baf7660119b69476134f6b72ee853a2bf59b5c77ae20d35e4df}}' - name: task-name value: core-comp - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: core-comp-driver template: system-container-driver - arguments: @@ -244,7 +283,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-core outputs: {} @@ -260,8 +299,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -272,8 +311,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - 
'{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -286,6 +325,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -295,6 +347,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -304,8 +360,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -314,9 +370,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -325,14 +381,22 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: parameters: - name: component value: '{{workflow.parameters.components-comp-core}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"cachingOptions":{},"componentRef":{"name":"comp-core"},"taskInfo":{"name":"core"}}' - name: task-name @@ -341,8 +405,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.core-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.core-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.core-driver.outputs.parameters.condition}}' depends: core-driver.Succeeded @@ -350,7 +414,7 @@ spec: template: comp-core inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-mantle outputs: {} @@ -366,8 +430,8 @@ spec: value: '{{workflow.parameters.implementations-4e3e2b26bcc3e5ebd2165f67c13916a89b3269c76c31b39a2e59f15e82b89020}}' - name: task-name value: crust-comp - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: mantle.Succeeded name: crust-comp-driver template: system-container-driver @@ -385,8 +449,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-mantle}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"cachingOptions":{},"componentRef":{"name":"comp-mantle"},"taskInfo":{"name":"mantle"}}' - name: task-name @@ -395,8 +459,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.mantle-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.mantle-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.mantle-driver.outputs.parameters.condition}}' depends: mantle-driver.Succeeded @@ -404,7 +468,7 @@ spec: template: comp-mantle inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id 
metadata: {} name: root outputs: {} @@ -422,8 +486,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/nested_pipeline_opt_input_child_level_compiled.yaml b/test_data/compiled-workflows/nested_pipeline_opt_input_child_level_compiled.yaml index 8c1067fef07..9794a6bdbd3 100644 --- a/test_data/compiled-workflows/nested_pipeline_opt_input_child_level_compiled.yaml +++ b/test_data/compiled-workflows/nested_pipeline_opt_input_child_level_compiled.yaml @@ -121,8 +121,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -149,6 +149,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -158,13 +171,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -186,6 +203,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -215,6 +240,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -225,6 +254,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -264,6 +296,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -288,8 +327,8 @@ spec: value: '{{workflow.parameters.implementations-67acbf0e9a3f6bfa27bfaf8979e8f91a80b1de04a3c0dffff6d475e5d8c8a2fc}}' - name: task-name value: component-a-bool - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: component-a-bool-driver template: system-container-driver - arguments: @@ -312,8 +351,8 @@ spec: value: '{{workflow.parameters.implementations-1b21465d9fecfb690a65b5d22e0ec7540a7652fb013f3386f36e8255dd2ab5ca}}' - name: task-name value: component-a-int - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: component-a-int-driver template: system-container-driver - arguments: @@ -336,8 +375,8 @@ spec: value: 
'{{workflow.parameters.implementations-e793c84d1e31d803ca345dd2c0bf915dd5e8c356537d91a002e90837789e73be}}' - name: task-name value: component-a-str - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: component-a-str-driver template: system-container-driver - arguments: @@ -360,8 +399,8 @@ spec: value: '{{workflow.parameters.implementations-847405d2298b3810654100892c836106fe0397c48f59c14f51827ef975efcd95}}' - name: task-name value: component-b-bool - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: component-b-bool-driver template: system-container-driver - arguments: @@ -384,8 +423,8 @@ spec: value: '{{workflow.parameters.implementations-fcf5d3023cec6f80952cd3ce9a483536e84ac6ae5f853a7178b710495776f2a5}}' - name: task-name value: component-b-int - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: component-b-int-driver template: system-container-driver - arguments: @@ -408,8 +447,8 @@ spec: value: '{{workflow.parameters.implementations-319fe909f6b49ecb503c73c273e622302238293692577f00c9d93d0071fef705}}' - name: task-name value: component-b-str - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: component-b-str-driver template: system-container-driver - arguments: @@ -424,7 +463,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-nested-pipeline outputs: {} @@ -440,8 +479,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -452,8 +491,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -466,6 +505,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -475,6 +527,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -484,8 +540,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -494,9 +550,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -505,14 +561,22 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + 
projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: parameters: - name: component value: '{{workflow.parameters.components-comp-nested-pipeline}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"cachingOptions":{},"componentRef":{"name":"comp-nested-pipeline"},"inputs":{"parameters":{"nestedInputBool1":{"runtimeValue":{"constant":true}},"nestedInputInt1":{"runtimeValue":{"constant":1}},"nestedInputStr1":{"runtimeValue":{"constant":"Input - pipeline"}}}},"taskInfo":{"name":"nested-pipeline"}}' @@ -522,8 +586,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.nested-pipeline-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.nested-pipeline-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.nested-pipeline-driver.outputs.parameters.condition}}' depends: nested-pipeline-driver.Succeeded @@ -531,7 +595,7 @@ spec: template: comp-nested-pipeline inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -549,8 +613,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/nested_pipeline_opt_inputs_nil_compiled.yaml b/test_data/compiled-workflows/nested_pipeline_opt_inputs_nil_compiled.yaml index 4a0d9a9abfe..85bece338ea 100644 --- a/test_data/compiled-workflows/nested_pipeline_opt_inputs_nil_compiled.yaml +++ b/test_data/compiled-workflows/nested_pipeline_opt_inputs_nil_compiled.yaml @@ -72,8 +72,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -100,6 +100,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -109,13 +122,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -137,6 +154,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -166,6 +191,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -176,6 +205,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: 
/var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -215,6 +247,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -239,8 +278,8 @@ spec: value: '{{workflow.parameters.implementations-ea55a4b32ea777d21165417782052ce723fbedd5f411eb3ea22cf9ef7e574167}}' - name: task-name value: component-bool - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: component-bool-driver template: system-container-driver - arguments: @@ -263,8 +302,8 @@ spec: value: '{{workflow.parameters.implementations-edd8fe327d9525f18fce56ac94b03f295b8e9dff3dadf0403760de87f18ddcb2}}' - name: task-name value: component-int - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: component-int-driver template: system-container-driver - arguments: @@ -287,8 +326,8 @@ spec: value: '{{workflow.parameters.implementations-4a1ad43e53e46919879caa0a1100eef09d1fbcd99a4afae74fabbe11fcb381ce}}' - name: task-name value: component-str - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: component-str-driver template: system-container-driver - arguments: @@ -303,7 +342,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-nested-pipeline outputs: {} @@ -319,8 +358,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -331,8 +370,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -345,6 +384,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -354,6 +406,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -363,8 +419,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -373,9 +429,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -384,14 +440,22 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + 
projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: parameters: - name: component value: '{{workflow.parameters.components-comp-nested-pipeline}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"cachingOptions":{},"componentRef":{"name":"comp-nested-pipeline"},"taskInfo":{"name":"nested-pipeline"}}' - name: task-name @@ -400,8 +464,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.nested-pipeline-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.nested-pipeline-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.nested-pipeline-driver.outputs.parameters.condition}}' depends: nested-pipeline-driver.Succeeded @@ -409,7 +473,7 @@ spec: template: comp-nested-pipeline inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -427,8 +491,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/nested_pipeline_opt_inputs_parent_level_compiled.yaml b/test_data/compiled-workflows/nested_pipeline_opt_inputs_parent_level_compiled.yaml index cadf245eb26..9ff5873bc27 100644 --- a/test_data/compiled-workflows/nested_pipeline_opt_inputs_parent_level_compiled.yaml +++ b/test_data/compiled-workflows/nested_pipeline_opt_inputs_parent_level_compiled.yaml @@ -124,8 +124,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -152,6 +152,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -161,13 +174,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -189,6 +206,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -218,6 +243,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -228,6 +257,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -267,6 +299,13 @@ spec: 
     volumes:
     - emptyDir: {}
       name: kfp-launcher
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
     - emptyDir: {}
       name: gcs-scratch
     - emptyDir: {}
@@ -291,8 +330,8 @@ spec:
             value: '{{workflow.parameters.implementations-3fb1a1c38ece9fdf50975220e7cfae8f23547ef21522393d7e590333a4dbfd75}}'
           - name: task-name
             value: component-nil-bool-default
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         name: component-nil-bool-default-driver
         template: system-container-driver
       - arguments:
@@ -315,8 +354,8 @@ spec:
             value: '{{workflow.parameters.implementations-22fb7cbea158fa49c81562ab2996be3fe3b07c161e8979367050d728d411e29c}}'
           - name: task-name
             value: component-nil-int-default
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         name: component-nil-int-default-driver
         template: system-container-driver
       - arguments:
@@ -339,8 +378,8 @@ spec:
             value: '{{workflow.parameters.implementations-af9421038bc7cb5eeb32efd4d2a0c050a4d7988646bac55e6aaaa06cabc9304d}}'
           - name: task-name
             value: component-nil-str-default
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         name: component-nil-str-default-driver
         template: system-container-driver
       - arguments:
@@ -355,7 +394,7 @@ spec:
         template: system-container-executor
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
     metadata: {}
     name: comp-nested-pipeline-nil-defaults
     outputs: {}
@@ -371,8 +410,8 @@ spec:
             value: '{{workflow.parameters.implementations-0471bffb66a33e2f351040cca3cec0486ec93f968cc8eae19f63a7c81e9f5916}}'
           - name: task-name
             value: component-bool-default
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         name: component-bool-default-driver
         template: system-container-driver
       - arguments:
@@ -395,8 +434,8 @@ spec:
             value: '{{workflow.parameters.implementations-0f1bb762821e1923f608b9b5c75f9fb3e4a628508c2343e9cc383eff2bc0ae3e}}'
          - name: task-name
             value: component-int-default
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         name: component-int-default-driver
         template: system-container-driver
       - arguments:
@@ -419,8 +458,8 @@ spec:
             value: '{{workflow.parameters.implementations-fb41f663306ed858f33b40f076339dd4af802def6164ef138a8c278c8dd2647e}}'
           - name: task-name
             value: component-str-default
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         name: component-str-default-driver
         template: system-container-driver
       - arguments:
@@ -435,7 +474,7 @@ spec:
         template: system-container-executor
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
     metadata: {}
     name: comp-nested-pipeline-non-nil-defaults
     outputs: {}
@@ -451,8 +490,8 @@ spec:
       - '{{workflow.name}}'
       - --run_display_name
       - ""
-      - --dag_execution_id
-      - '{{inputs.parameters.parent-dag-id}}'
+      - --parent_task_id
+      - '{{inputs.parameters.parent-dag-task-id}}'
       - --component
       - '{{inputs.parameters.component}}'
       - --task
@@ -463,8 +502,8 @@ spec:
       - '{{inputs.parameters.runtime-config}}'
       - --iteration_index
       - '{{inputs.parameters.iteration-index}}'
-      - --execution_id_path
-      - '{{outputs.parameters.execution-id.path}}'
+      - --parent_task_id_path
+      - '{{outputs.parameters.parent-dag-task-id-path.path}}'
       - --iteration_count_path
       - '{{outputs.parameters.iteration-count.path}}'
       - --condition_path
@@ -477,6 +516,19 @@ spec:
       - ""
       command:
       - driver
+      env:
+      - name: KFP_POD_NAME
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.name
+      - name: KFP_POD_UID
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       image: ghcr.io/kubeflow/kfp-driver:latest
       name: ""
       resources:
@@ -486,6 +538,10 @@ spec:
         requests:
           cpu: 100m
           memory: 64Mi
+      volumeMounts:
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
     inputs:
       parameters:
       - name: component
@@ -495,8 +551,8 @@ spec:
         name: task
       - default: ""
         name: task-name
-      - default: "0"
-        name: parent-dag-id
+      - default: ""
+        name: parent-dag-task-id
       - default: "-1"
         name: iteration-index
       - default: DAG
@@ -505,9 +561,9 @@ spec:
     name: system-dag-driver
     outputs:
       parameters:
-      - name: execution-id
+      - name: parent-dag-task-id-path
         valueFrom:
-          path: /tmp/outputs/execution-id
+          path: /tmp/outputs/task-id
       - name: iteration-count
         valueFrom:
           default: "0"
@@ -516,14 +572,22 @@ spec:
         valueFrom:
           default: "true"
           path: /tmp/outputs/condition
+    volumes:
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
   - dag:
       tasks:
       - arguments:
           parameters:
           - name: component
             value: '{{workflow.parameters.components-comp-nested-pipeline-nil-defaults}}'
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
           - name: task
             value: '{"cachingOptions":{},"componentRef":{"name":"comp-nested-pipeline-nil-defaults"},"inputs":{"parameters":{"nestedInputBool":{"componentInputParameter":"inputBool"},"nestedInputInt":{"componentInputParameter":"inputInt"},"nestedInputStr":{"componentInputParameter":"inputStr"}}},"taskInfo":{"name":"nested-pipeline-nil-defaults"}}'
           - name: task-name
@@ -532,8 +596,8 @@ spec:
         template: system-dag-driver
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{tasks.nested-pipeline-nil-defaults-driver.outputs.parameters.execution-id}}'
+          - name: parent-dag-task-id
+            value: '{{tasks.nested-pipeline-nil-defaults-driver.outputs.parameters.parent-dag-task-id-path}}'
           - name: condition
             value: '{{tasks.nested-pipeline-nil-defaults-driver.outputs.parameters.condition}}'
         depends: nested-pipeline-nil-defaults-driver.Succeeded
@@ -543,8 +607,8 @@ spec:
           parameters:
           - name: component
             value: '{{workflow.parameters.components-comp-nested-pipeline-non-nil-defaults}}'
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
           - name: task
             value: '{"cachingOptions":{},"componentRef":{"name":"comp-nested-pipeline-non-nil-defaults"},"inputs":{"parameters":{"nestedInputBool":{"componentInputParameter":"inputBool"},"nestedInputInt":{"componentInputParameter":"inputInt"},"nestedInputStr":{"componentInputParameter":"inputStr"}}},"taskInfo":{"name":"nested-pipeline-non-nil-defaults"}}'
           - name: task-name
@@ -553,8 +617,8 @@ spec:
         template: system-dag-driver
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{tasks.nested-pipeline-non-nil-defaults-driver.outputs.parameters.execution-id}}'
+          - name: parent-dag-task-id
+            value: '{{tasks.nested-pipeline-non-nil-defaults-driver.outputs.parameters.parent-dag-task-id-path}}'
           - name: condition
             value: '{{tasks.nested-pipeline-non-nil-defaults-driver.outputs.parameters.condition}}'
         depends: nested-pipeline-non-nil-defaults-driver.Succeeded
@@ -562,7 +626,7 @@ spec:
         template: comp-nested-pipeline-non-nil-defaults
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
     metadata: {}
     name: root
     outputs: {}
@@ -581,8 +645,8 @@ spec:
         template: system-dag-driver
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{tasks.root-driver.outputs.parameters.execution-id}}'
+          - name: parent-dag-task-id
+            value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}'
           - name: condition
             value: ""
         depends: root-driver.Succeeded
diff --git a/test_data/compiled-workflows/nested_return.yaml b/test_data/compiled-workflows/nested_return.yaml
index 2f97c738afb..8f885fea46e 100644
--- a/test_data/compiled-workflows/nested_return.yaml
+++ b/test_data/compiled-workflows/nested_return.yaml
@@ -42,8 +42,8 @@ spec:
       - '{{workflow.name}}'
       - --run_display_name
       - ""
-      - --dag_execution_id
-      - '{{inputs.parameters.parent-dag-id}}'
+      - --parent_task_id
+      - '{{inputs.parameters.parent-dag-task-id}}'
       - --component
       - '{{inputs.parameters.component}}'
       - --task
@@ -70,6 +70,19 @@ spec:
       - ""
       command:
       - driver
+      env:
+      - name: KFP_POD_NAME
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.name
+      - name: KFP_POD_UID
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       image: ghcr.io/kubeflow/kfp-driver:latest
       name: ""
       resources:
@@ -79,13 +92,17 @@ spec:
         requests:
           cpu: 100m
           memory: 64Mi
+      volumeMounts:
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
     inputs:
       parameters:
       - name: component
       - name: task
       - name: container
       - name: task-name
-      - name: parent-dag-id
+      - name: parent-dag-task-id
       - default: "-1"
         name: iteration-index
       - default: ""
@@ -107,6 +124,14 @@ spec:
         valueFrom:
           default: "true"
           path: /tmp/outputs/condition
+    volumes:
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
   - dag:
       tasks:
       - arguments:
@@ -136,6 +161,10 @@ spec:
         valueFrom:
           fieldRef:
             fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       envFrom:
       - configMapRef:
           name: metadata-grpc-configmap
@@ -146,6 +175,9 @@ spec:
       volumeMounts:
       - mountPath: /kfp-launcher
         name: kfp-launcher
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
       - mountPath: /gcs
         name: gcs-scratch
       - mountPath: /s3
@@ -185,6 +217,13 @@ spec:
     volumes:
     - emptyDir: {}
       name: kfp-launcher
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
     - emptyDir: {}
       name: gcs-scratch
     - emptyDir: {}
@@ -209,8 +248,8 @@ spec:
             value: '{{workflow.parameters.implementations-70e868fc6ce044c86e37af5f3bcd230d34c20381b89800a6273edd2f80a3798a}}'
           - name: task-name
             value: nested-return
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         name: nested-return-driver
         template: system-container-driver
       - arguments:
@@ -225,7 +264,7 @@ spec:
         template: system-container-executor
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
     metadata: {}
     name: root
     outputs: {}
@@ -241,8 +280,8 @@ spec:
       - '{{workflow.name}}'
       - --run_display_name
       - ""
-      - --dag_execution_id
-      - '{{inputs.parameters.parent-dag-id}}'
+      - --parent_task_id
+      - '{{inputs.parameters.parent-dag-task-id}}'
       - --component
       - '{{inputs.parameters.component}}'
       - --task
@@ -253,8 +292,8 @@ spec:
       - '{{inputs.parameters.runtime-config}}'
       - --iteration_index
       - '{{inputs.parameters.iteration-index}}'
-      - --execution_id_path
-      - '{{outputs.parameters.execution-id.path}}'
+      - --parent_task_id_path
+      - '{{outputs.parameters.parent-dag-task-id-path.path}}'
       - --iteration_count_path
       - '{{outputs.parameters.iteration-count.path}}'
       - --condition_path
@@ -267,6 +306,19 @@ spec:
       - ""
       command:
       - driver
+      env:
+      - name: KFP_POD_NAME
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.name
+      - name: KFP_POD_UID
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       image: ghcr.io/kubeflow/kfp-driver:latest
       name: ""
       resources:
@@ -276,6 +328,10 @@ spec:
         requests:
           cpu: 100m
           memory: 64Mi
+      volumeMounts:
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
     inputs:
       parameters:
       - name: component
@@ -285,8 +341,8 @@ spec:
         name: task
       - default: ""
         name: task-name
-      - default: "0"
-        name: parent-dag-id
+      - default: ""
+        name: parent-dag-task-id
       - default: "-1"
         name: iteration-index
       - default: DAG
@@ -295,9 +351,9 @@ spec:
     name: system-dag-driver
     outputs:
       parameters:
-      - name: execution-id
+      - name: parent-dag-task-id-path
         valueFrom:
-          path: /tmp/outputs/execution-id
+          path: /tmp/outputs/task-id
       - name: iteration-count
         valueFrom:
           default: "0"
@@ -306,6 +362,14 @@ spec:
         valueFrom:
           default: "true"
           path: /tmp/outputs/condition
+    volumes:
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
   - dag:
       tasks:
       - arguments:
@@ -320,8 +384,8 @@ spec:
         template: system-dag-driver
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{tasks.root-driver.outputs.parameters.execution-id}}'
+          - name: parent-dag-task-id
+            value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}'
           - name: condition
             value: ""
         depends: root-driver.Succeeded
diff --git a/test_data/compiled-workflows/nested_with_parameters.yaml b/test_data/compiled-workflows/nested_with_parameters.yaml
index 0ccadef5840..71ed776a5b4 100644
--- a/test_data/compiled-workflows/nested_with_parameters.yaml
+++ b/test_data/compiled-workflows/nested_with_parameters.yaml
@@ -74,8 +74,8 @@ spec:
       - '{{workflow.name}}'
       - --run_display_name
       - ""
-      - --dag_execution_id
-      - '{{inputs.parameters.parent-dag-id}}'
+      - --parent_task_id
+      - '{{inputs.parameters.parent-dag-task-id}}'
       - --component
       - '{{inputs.parameters.component}}'
       - --task
@@ -102,6 +102,19 @@ spec:
       - ""
       command:
       - driver
+      env:
+      - name: KFP_POD_NAME
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.name
+      - name: KFP_POD_UID
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       image: ghcr.io/kubeflow/kfp-driver:latest
       name: ""
       resources:
@@ -111,13 +124,17 @@ spec:
         requests:
           cpu: 100m
           memory: 64Mi
+      volumeMounts:
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
     inputs:
       parameters:
       - name: component
       - name: task
       - name: container
       - name: task-name
-      - name: parent-dag-id
+      - name: parent-dag-task-id
       - default: "-1"
         name: iteration-index
       - default: ""
@@ -139,6 +156,14 @@ spec:
         valueFrom:
           default: "true"
           path: /tmp/outputs/condition
+    volumes:
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
   - dag:
       tasks:
       - arguments:
@@ -168,6 +193,10 @@ spec:
         valueFrom:
           fieldRef:
             fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       envFrom:
       - configMapRef:
           name: metadata-grpc-configmap
@@ -178,6 +207,9 @@ spec:
       volumeMounts:
      - mountPath: /kfp-launcher
         name: kfp-launcher
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
       - mountPath: /gcs
         name: gcs-scratch
       - mountPath: /s3
@@ -217,6 +249,13 @@ spec:
     volumes:
     - emptyDir: {}
       name: kfp-launcher
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
     - emptyDir: {}
       name: gcs-scratch
     - emptyDir: {}
@@ -241,8 +280,10 @@ spec:
             value: '{{workflow.parameters.implementations-a2ff9335a8c3625e01d0cf6e331727c113a989bfb48adfd433b90721c60b3bf7}}'
           - name: task-name
             value: add-two-nums
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         depends: double.Succeeded && double-2.Succeeded
         name: add-two-nums-driver
         template: system-container-driver
@@ -253,6 +294,8 @@ spec:
           - default: "false"
             name: cached-decision
            value: '{{tasks.add-two-nums-driver.outputs.parameters.cached-decision}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         depends: add-two-nums-driver.Succeeded
         name: add-two-nums
         template: system-container-executor
@@ -266,8 +309,10 @@ spec:
             value: '{{workflow.parameters.implementations-f79f58ed4b630aadad448078027429b1d8053e45ed52bed5961317a8e3ebf5e3}}'
           - name: task-name
             value: double
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         name: double-driver
         template: system-container-driver
       - arguments:
@@ -277,6 +322,8 @@ spec:
          - default: "false"
             name: cached-decision
             value: '{{tasks.double-driver.outputs.parameters.cached-decision}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         depends: double-driver.Succeeded
         name: double
         template: system-container-executor
@@ -290,8 +337,10 @@ spec:
             value: '{{workflow.parameters.implementations-f79f58ed4b630aadad448078027429b1d8053e45ed52bed5961317a8e3ebf5e3}}'
           - name: task-name
             value: double-2
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         name: double-2-driver
         template: system-container-driver
       - arguments:
@@ -301,12 +350,15 @@ spec:
           - default: "false"
             name: cached-decision
             value: '{{tasks.double-2-driver.outputs.parameters.cached-decision}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         depends: double-2-driver.Succeeded
         name: double-2
         template: system-container-executor
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
+      - name: iteration-index
     metadata: {}
     name: comp-for-loop-4
     outputs: {}
@@ -322,8 +374,8 @@ spec:
       - '{{workflow.name}}'
       - --run_display_name
       - ""
-      - --dag_execution_id
-      - '{{inputs.parameters.parent-dag-id}}'
+      - --parent_task_id
+      - '{{inputs.parameters.parent-dag-task-id}}'
       - --component
       - '{{inputs.parameters.component}}'
       - --task
@@ -334,8 +386,8 @@ spec:
       - '{{inputs.parameters.runtime-config}}'
       - --iteration_index
       - '{{inputs.parameters.iteration-index}}'
-      - --execution_id_path
-      - '{{outputs.parameters.execution-id.path}}'
+      - --parent_task_id_path
+      - '{{outputs.parameters.parent-dag-task-id-path.path}}'
       - --iteration_count_path
       - '{{outputs.parameters.iteration-count.path}}'
       - --condition_path
@@ -348,6 +400,19 @@ spec:
       - ""
       command:
       - driver
+      env:
+      - name: KFP_POD_NAME
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.name
+      - name: KFP_POD_UID
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       image: ghcr.io/kubeflow/kfp-driver:latest
       name: ""
       resources:
@@ -357,6 +422,10 @@ spec:
         requests:
           cpu: 100m
           memory: 64Mi
+      volumeMounts:
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
     inputs:
       parameters:
       - name: component
@@ -366,8 +435,8 @@ spec:
         name: task
       - default: ""
         name: task-name
-      - default: "0"
-        name: parent-dag-id
+      - default: ""
+        name: parent-dag-task-id
       - default: "-1"
         name: iteration-index
       - default: DAG
@@ -376,9 +445,9 @@ spec:
     name: system-dag-driver
     outputs:
       parameters:
-      - name: execution-id
+      - name: parent-dag-task-id-path
         valueFrom:
-          path: /tmp/outputs/execution-id
+          path: /tmp/outputs/task-id
       - name: iteration-count
         valueFrom:
           default: "0"
@@ -387,64 +456,43 @@ spec:
         valueFrom:
           default: "true"
           path: /tmp/outputs/condition
+    volumes:
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
   - dag:
       tasks:
       - arguments:
           parameters:
           - name: component
             value: '{{workflow.parameters.components-comp-for-loop-4}}'
-          - name: iteration-index
-            value: '{{inputs.parameters.iteration-index}}'
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
-          - name: task
-            value: '{"componentRef":{"name":"comp-for-loop-4"},"inputs":{"parameters":{"pipelinechannel--loop-item-param-1":{"componentInputParameter":"pipelinechannel--loop-item-param-1"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[1,
-              2, 3]"}},"taskInfo":{"name":"for-loop-4"}}'
-        name: iteration-item-driver
-        template: system-dag-driver
-      - arguments:
-          parameters:
-          - name: parent-dag-id
-            value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}'
-          - name: condition
-            value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}'
-        depends: iteration-item-driver.Succeeded
-        name: iteration-item
-        template: comp-for-loop-4
-    inputs:
-      parameters:
-      - name: parent-dag-id
-      - name: iteration-index
-    metadata: {}
-    name: comp-for-loop-4-iteration
-    outputs: {}
-  - dag:
-      tasks:
-      - arguments:
-          parameters:
-          - name: component
-            value: '{{workflow.parameters.components-comp-for-loop-4}}'
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
           - name: task
             value: '{"componentRef":{"name":"comp-for-loop-4"},"inputs":{"parameters":{"pipelinechannel--loop-item-param-1":{"componentInputParameter":"pipelinechannel--loop-item-param-1"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[1,
               2, 3]"}},"taskInfo":{"name":"for-loop-4"}}'
+          - name: task-name
+            value: for-loop-4
         name: iteration-driver
         template: system-dag-driver
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}'
+          - name: parent-dag-task-id
+            value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}'
           - name: iteration-index
             value: '{{item}}'
         depends: iteration-driver.Succeeded
         name: iteration-iterations
-        template: comp-for-loop-4-iteration
+        template: comp-for-loop-4
         withSequence:
           count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}'
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
     metadata: {}
     name: comp-for-loop-4-for-loop-4-iterator
     outputs: {}
@@ -452,46 +500,18 @@ spec:
       tasks:
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
-        name: for-loop-4
-        template: comp-for-loop-4-for-loop-4-iterator
-    inputs:
-      parameters:
-      - name: parent-dag-id
-    metadata: {}
-    name: comp-for-loop-2
-    outputs: {}
-  - dag:
-      tasks:
-      - arguments:
-          parameters:
-          - name: component
-            value: '{{workflow.parameters.components-comp-for-loop-2}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
           - name: iteration-index
             value: '{{inputs.parameters.iteration-index}}'
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
-          - name: task
-            value: '{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1,
-              2, 3]"}},"taskInfo":{"name":"for-loop-2"}}'
-        name: iteration-item-driver
-        template: system-dag-driver
-      - arguments:
-          parameters:
-          - name: parent-dag-id
-            value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}'
-          - name: condition
-            value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}'
-        depends: iteration-item-driver.Succeeded
-        name: iteration-item
-        template: comp-for-loop-2
+        name: for-loop-4
+        template: comp-for-loop-4-for-loop-4-iterator
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
       - name: iteration-index
     metadata: {}
-    name: comp-for-loop-2-iteration
+    name: comp-for-loop-2
     outputs: {}
   - dag:
       tasks:
@@ -499,27 +519,29 @@ spec:
           parameters:
           - name: component
             value: '{{workflow.parameters.components-comp-for-loop-2}}'
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
           - name: task
             value: '{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1,
               2, 3]"}},"taskInfo":{"name":"for-loop-2"}}'
+          - name: task-name
+            value: for-loop-2
         name: iteration-driver
         template: system-dag-driver
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}'
+          - name: parent-dag-task-id
+            value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}'
           - name: iteration-index
            value: '{{item}}'
         depends: iteration-driver.Succeeded
         name: iteration-iterations
-        template: comp-for-loop-2-iteration
+        template: comp-for-loop-2
        withSequence:
           count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}'
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
     metadata: {}
     name: comp-for-loop-2-for-loop-2-iterator
     outputs: {}
@@ -535,8 +557,8 @@ spec:
             value: '{{workflow.parameters.implementations-1be5f8f8f37d8780d2254662338636e64bb02c2cd31f4099b84c292a06f04cd3}}'
           - name: task-name
             value: add
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         depends: for-loop-2.Succeeded
         name: add-driver
         template: system-container-driver
@@ -552,13 +574,13 @@ spec:
         template: system-container-executor
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         name: for-loop-2
         template: comp-for-loop-2-for-loop-2-iterator
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
     metadata: {}
     name: root
     outputs: {}
@@ -576,8 +598,8 @@ spec:
         template: system-dag-driver
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{tasks.root-driver.outputs.parameters.execution-id}}'
+          - name: parent-dag-task-id
+            value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}'
           - name: condition
             value: ""
         depends: root-driver.Succeeded
diff --git a/test_data/compiled-workflows/notebook_component_mixed.yaml b/test_data/compiled-workflows/notebook_component_mixed.yaml
index 028dda7f617..29c737d4e35 100644
--- a/test_data/compiled-workflows/notebook_component_mixed.yaml
+++ b/test_data/compiled-workflows/notebook_component_mixed.yaml
@@ -270,8 +270,8 @@ spec:
       - '{{workflow.name}}'
       - --run_display_name
       - ""
-      - --dag_execution_id
-      - '{{inputs.parameters.parent-dag-id}}'
+      - --parent_task_id
+      - '{{inputs.parameters.parent-dag-task-id}}'
       - --component
       - '{{inputs.parameters.component}}'
       - --task
@@ -298,6 +298,19 @@ spec:
       - ""
       command:
       - driver
+      env:
+      - name: KFP_POD_NAME
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.name
+      - name: KFP_POD_UID
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       image: ghcr.io/kubeflow/kfp-driver:latest
       name: ""
       resources:
@@ -307,13 +320,17 @@ spec:
         requests:
           cpu: 100m
           memory: 64Mi
+      volumeMounts:
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
     inputs:
       parameters:
       - name: component
       - name: task
       - name: container
       - name: task-name
-      - name: parent-dag-id
+      - name: parent-dag-task-id
       - default: "-1"
         name: iteration-index
       - default: ""
@@ -335,6 +352,14 @@ spec:
         valueFrom:
           default: "true"
           path: /tmp/outputs/condition
+    volumes:
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
   - dag:
       tasks:
       - arguments:
@@ -364,6 +389,10 @@ spec:
         valueFrom:
          fieldRef:
             fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       envFrom:
       - configMapRef:
           name: metadata-grpc-configmap
@@ -374,6 +403,9 @@ spec:
       volumeMounts:
       - mountPath: /kfp-launcher
         name: kfp-launcher
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
       - mountPath: /gcs
         name: gcs-scratch
       - mountPath: /s3
@@ -413,6 +445,13 @@ spec:
     volumes:
     - emptyDir: {}
       name: kfp-launcher
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
     - emptyDir: {}
       name: gcs-scratch
     - emptyDir: {}
@@ -437,8 +476,8 @@ spec:
             value: '{{workflow.parameters.implementations-5e4fe14151999fbde09ad1cf2ea1ef08988b5557bb7ffb2f26e71b48051a0b24}}'
           - name: task-name
             value: evaluate-model
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         depends: train-model.Succeeded
         name: evaluate-model-driver
         template: system-container-driver
@@ -462,8 +501,8 @@ spec:
            value: '{{workflow.parameters.implementations-a389d0c5411da97fdf7ec55aea7ffb8d99e46f46230c9b8890c6ee0fed4c920c}}'
           - name: task-name
             value: preprocess
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         name: preprocess-driver
         template: system-container-driver
       - arguments:
@@ -486,8 +525,8 @@ spec:
             value: '{{workflow.parameters.implementations-428abe73fd2527425a6d4eba6c35314506906ee691a0dbe032573b8c130f58d1}}'
           - name: task-name
             value: train-model
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         depends: preprocess.Succeeded
         name: train-model-driver
         template: system-container-driver
@@ -503,7 +542,7 @@ spec:
         template: system-container-executor
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
     metadata: {}
     name: root
     outputs: {}
@@ -519,8 +558,8 @@ spec:
       - '{{workflow.name}}'
       - --run_display_name
       - ""
-      - --dag_execution_id
-      - '{{inputs.parameters.parent-dag-id}}'
+      - --parent_task_id
+      - '{{inputs.parameters.parent-dag-task-id}}'
       - --component
       - '{{inputs.parameters.component}}'
       - --task
@@ -531,8 +570,8 @@ spec:
       - '{{inputs.parameters.runtime-config}}'
       - --iteration_index
       - '{{inputs.parameters.iteration-index}}'
-      - --execution_id_path
-      - '{{outputs.parameters.execution-id.path}}'
+      - --parent_task_id_path
+      - '{{outputs.parameters.parent-dag-task-id-path.path}}'
       - --iteration_count_path
       - '{{outputs.parameters.iteration-count.path}}'
       - --condition_path
@@ -545,6 +584,19 @@ spec:
       - ""
       command:
       - driver
+      env:
+      - name: KFP_POD_NAME
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.name
+      - name: KFP_POD_UID
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       image: ghcr.io/kubeflow/kfp-driver:latest
       name: ""
       resources:
@@ -554,6 +606,10 @@ spec:
         requests:
           cpu: 100m
           memory: 64Mi
+      volumeMounts:
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
     inputs:
       parameters:
       - name: component
@@ -563,8 +619,8 @@ spec:
         name: task
       - default: ""
         name: task-name
-      - default: "0"
-        name: parent-dag-id
+      - default: ""
+        name: parent-dag-task-id
       - default: "-1"
         name: iteration-index
       - default: DAG
@@ -573,9 +629,9 @@ spec:
     name: system-dag-driver
     outputs:
       parameters:
-      - name: execution-id
+      - name: parent-dag-task-id-path
         valueFrom:
-          path: /tmp/outputs/execution-id
+          path: /tmp/outputs/task-id
       - name: iteration-count
         valueFrom:
           default: "0"
@@ -584,6 +640,14 @@ spec:
         valueFrom:
           default: "true"
           path: /tmp/outputs/condition
+    volumes:
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
   - dag:
       tasks:
       - arguments:
@@ -598,8 +662,8 @@ spec:
         template: system-dag-driver
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{tasks.root-driver.outputs.parameters.execution-id}}'
+          - name: parent-dag-task-id
+            value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}'
           - name: condition
             value: ""
         depends: root-driver.Succeeded
diff --git a/test_data/compiled-workflows/notebook_component_simple.yaml b/test_data/compiled-workflows/notebook_component_simple.yaml
index d2b85c86866..844c14d996e 100644
--- a/test_data/compiled-workflows/notebook_component_simple.yaml
+++ b/test_data/compiled-workflows/notebook_component_simple.yaml
@@ -139,8 +139,8 @@ spec:
       - '{{workflow.name}}'
       - --run_display_name
       - ""
-      - --dag_execution_id
-      - '{{inputs.parameters.parent-dag-id}}'
+      - --parent_task_id
+      - '{{inputs.parameters.parent-dag-task-id}}'
       - --component
       - '{{inputs.parameters.component}}'
       - --task
@@ -167,6 +167,19 @@ spec:
       - ""
       command:
       - driver
+      env:
+      - name: KFP_POD_NAME
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.name
+      - name: KFP_POD_UID
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       image: ghcr.io/kubeflow/kfp-driver:latest
       name: ""
       resources:
@@ -176,13 +189,17 @@ spec:
         requests:
           cpu: 100m
           memory: 64Mi
+      volumeMounts:
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
     inputs:
       parameters:
       - name: component
       - name: task
       - name: container
       - name: task-name
-      - name: parent-dag-id
+      - name: parent-dag-task-id
       - default: "-1"
         name: iteration-index
       - default: ""
@@ -204,6 +221,14 @@ spec:
         valueFrom:
           default: "true"
           path: /tmp/outputs/condition
+    volumes:
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
   - dag:
       tasks:
       - arguments:
@@ -233,6 +258,10 @@ spec:
         valueFrom:
           fieldRef:
             fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       envFrom:
       - configMapRef:
           name: metadata-grpc-configmap
@@ -243,6 +272,9 @@ spec:
       volumeMounts:
       - mountPath: /kfp-launcher
         name: kfp-launcher
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
       - mountPath: /gcs
         name: gcs-scratch
       - mountPath: /s3
@@ -282,6 +314,13 @@ spec:
     volumes:
     - emptyDir: {}
       name: kfp-launcher
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
     - emptyDir: {}
       name: gcs-scratch
     - emptyDir: {}
@@ -306,8 +345,8 @@ spec:
             value: '{{workflow.parameters.implementations-b5cadd2bcc71c6f13dc450182b4eb137eb62b94bdd5a170d964d6f8dec0260a5}}'
           - name: task-name
             value: run-train-notebook
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         name: run-train-notebook-driver
         template: system-container-driver
       - arguments:
@@ -322,7 +361,7 @@ spec:
         template: system-container-executor
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
     metadata: {}
     name: root
     outputs: {}
@@ -338,8 +377,8 @@ spec:
       - '{{workflow.name}}'
       - --run_display_name
       - ""
-      - --dag_execution_id
-      - '{{inputs.parameters.parent-dag-id}}'
+      - --parent_task_id
+      - '{{inputs.parameters.parent-dag-task-id}}'
       - --component
       - '{{inputs.parameters.component}}'
       - --task
@@ -350,8 +389,8 @@ spec:
       - '{{inputs.parameters.runtime-config}}'
      - --iteration_index
       - '{{inputs.parameters.iteration-index}}'
-      - --execution_id_path
-      - '{{outputs.parameters.execution-id.path}}'
+      - --parent_task_id_path
+      - '{{outputs.parameters.parent-dag-task-id-path.path}}'
       - --iteration_count_path
       - '{{outputs.parameters.iteration-count.path}}'
       - --condition_path
@@ -364,6 +403,19 @@ spec:
       - ""
       command:
       - driver
+      env:
+      - name: KFP_POD_NAME
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.name
+      - name: KFP_POD_UID
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       image: ghcr.io/kubeflow/kfp-driver:latest
       name: ""
       resources:
@@ -373,6 +425,10 @@ spec:
         requests:
           cpu: 100m
           memory: 64Mi
+      volumeMounts:
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
     inputs:
       parameters:
       - name: component
@@ -382,8 +438,8 @@ spec:
         name: task
       - default: ""
         name: task-name
-      - default: "0"
-        name: parent-dag-id
+      - default: ""
+        name: parent-dag-task-id
       - default: "-1"
         name: iteration-index
       - default: DAG
@@ -392,9 +448,9 @@ spec:
     name: system-dag-driver
     outputs:
       parameters:
-      - name: execution-id
+      - name: parent-dag-task-id-path
         valueFrom:
-          path: /tmp/outputs/execution-id
+          path: /tmp/outputs/task-id
       - name: iteration-count
         valueFrom:
           default: "0"
@@ -403,6 +459,14 @@ spec:
         valueFrom:
           default: "true"
           path: /tmp/outputs/condition
+    volumes:
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
   - dag:
       tasks:
       - arguments:
@@ -417,8 +481,8 @@ spec:
         template: system-dag-driver
       - arguments:
          parameters:
-          - name: parent-dag-id
-            value: '{{tasks.root-driver.outputs.parameters.execution-id}}'
+          - name: parent-dag-task-id
+            value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}'
           - name: condition
             value: ""
         depends: root-driver.Succeeded
diff --git a/test_data/compiled-workflows/output_metrics.yaml b/test_data/compiled-workflows/output_metrics.yaml
index 99e72873e95..8904952f765 100644
--- a/test_data/compiled-workflows/output_metrics.yaml
+++ b/test_data/compiled-workflows/output_metrics.yaml
@@ -43,8 +43,8 @@ spec:
       - '{{workflow.name}}'
       - --run_display_name
       - ""
-      - --dag_execution_id
-      - '{{inputs.parameters.parent-dag-id}}'
+      - --parent_task_id
+      - '{{inputs.parameters.parent-dag-task-id}}'
       - --component
       - '{{inputs.parameters.component}}'
       - --task
@@ -71,6 +71,19 @@ spec:
       - ""
       command:
       - driver
+      env:
+      - name: KFP_POD_NAME
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.name
+      - name: KFP_POD_UID
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       image: ghcr.io/kubeflow/kfp-driver:latest
       name: ""
       resources:
@@ -80,13 +93,17 @@ spec:
         requests:
           cpu: 100m
           memory: 64Mi
+      volumeMounts:
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
     inputs:
       parameters:
       - name: component
       - name: task
       - name: container
       - name: task-name
-      - name: parent-dag-id
+      - name: parent-dag-task-id
       - default: "-1"
         name: iteration-index
       - default: ""
@@ -108,6 +125,14 @@ spec:
         valueFrom:
           default: "true"
           path: /tmp/outputs/condition
+    volumes:
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
   - dag:
       tasks:
       - arguments:
@@ -137,6 +162,10 @@ spec:
         valueFrom:
           fieldRef:
             fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       envFrom:
       - configMapRef:
           name: metadata-grpc-configmap
@@ -147,6 +176,9 @@ spec:
       volumeMounts:
       - mountPath: /kfp-launcher
         name: kfp-launcher
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
       - mountPath: /gcs
         name: gcs-scratch
       - mountPath: /s3
@@ -186,6 +218,13 @@ spec:
     volumes:
     - emptyDir: {}
       name: kfp-launcher
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
     - emptyDir: {}
       name: gcs-scratch
     - emptyDir: {}
@@ -210,8 +249,8 @@ spec:
             value: '{{workflow.parameters.implementations-ab5b53a52a65c78da1d8dcfca19f7568d6844ad951c971baa75d318f6528cd97}}'
           - name: task-name
             value: output-metrics
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         name: output-metrics-driver
         template: system-container-driver
       - arguments:
@@ -226,7 +265,7 @@ spec:
         template: system-container-executor
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
     metadata: {}
     name: root
     outputs: {}
@@ -242,8 +281,8 @@ spec:
       - '{{workflow.name}}'
       - --run_display_name
       - ""
-      - --dag_execution_id
-      - '{{inputs.parameters.parent-dag-id}}'
+      - --parent_task_id
+      - '{{inputs.parameters.parent-dag-task-id}}'
       - --component
       - '{{inputs.parameters.component}}'
       - --task
@@ -254,8 +293,8 @@ spec:
       - '{{inputs.parameters.runtime-config}}'
       - --iteration_index
       - '{{inputs.parameters.iteration-index}}'
-      - --execution_id_path
-      - '{{outputs.parameters.execution-id.path}}'
+      - --parent_task_id_path
+      - '{{outputs.parameters.parent-dag-task-id-path.path}}'
       - --iteration_count_path
       - '{{outputs.parameters.iteration-count.path}}'
       - --condition_path
@@ -268,6 +307,19 @@ spec:
       - ""
       command:
       - driver
+      env:
+      - name: KFP_POD_NAME
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.name
+      - name: KFP_POD_UID
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       image: ghcr.io/kubeflow/kfp-driver:latest
       name: ""
       resources:
@@ -277,6 +329,10 @@ spec:
         requests:
           cpu: 100m
           memory: 64Mi
+      volumeMounts:
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
     inputs:
       parameters:
       - name: component
@@ -286,8 +342,8 @@ spec:
         name: task
       - default: ""
         name: task-name
-      - default: "0"
-        name: parent-dag-id
+      - default: ""
+        name: parent-dag-task-id
       - default: "-1"
         name: iteration-index
       - default: DAG
@@ -296,9 +352,9 @@ spec:
     name: system-dag-driver
     outputs:
       parameters:
-      - name: execution-id
+      - name: parent-dag-task-id-path
         valueFrom:
-          path: /tmp/outputs/execution-id
+          path: /tmp/outputs/task-id
       - name: iteration-count
         valueFrom:
           default: "0"
@@ -307,6 +363,14 @@ spec:
         valueFrom:
           default: "true"
           path: /tmp/outputs/condition
+    volumes:
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
   - dag:
       tasks:
       - arguments:
@@ -321,8 +385,8 @@ spec:
         template: system-dag-driver
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{tasks.root-driver.outputs.parameters.execution-id}}'
+          - name: parent-dag-task-id
+            value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}'
           - name: condition
             value: ""
         depends: root-driver.Succeeded
diff --git a/test_data/compiled-workflows/parallel_for_after_dependency.yaml b/test_data/compiled-workflows/parallel_for_after_dependency.yaml
index 81c0ad8e624..e6025c34b51 100644
--- a/test_data/compiled-workflows/parallel_for_after_dependency.yaml
+++ b/test_data/compiled-workflows/parallel_for_after_dependency.yaml
@@ -44,8 +44,8 @@ spec:
       - '{{workflow.name}}'
       - --run_display_name
       - ""
-      - --dag_execution_id
-      - '{{inputs.parameters.parent-dag-id}}'
+      - --parent_task_id
+      - '{{inputs.parameters.parent-dag-task-id}}'
       - --component
       - '{{inputs.parameters.component}}'
       - --task
@@ -72,6 +72,19 @@ spec:
       - ""
       command:
       - driver
+      env:
+      - name: KFP_POD_NAME
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.name
+      - name: KFP_POD_UID
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       image: ghcr.io/kubeflow/kfp-driver:latest
       name: ""
       resources:
@@ -81,13 +94,17 @@ spec:
         requests:
           cpu: 100m
           memory: 64Mi
+      volumeMounts:
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
     inputs:
       parameters:
       - name: component
       - name: task
       - name: container
       - name: task-name
-      - name: parent-dag-id
+      - name: parent-dag-task-id
       - default: "-1"
         name: iteration-index
       - default: ""
@@ -109,6 +126,14 @@ spec:
         valueFrom:
           default: "true"
           path: /tmp/outputs/condition
+    volumes:
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
   - dag:
       tasks:
       - arguments:
@@ -138,6 +163,10 @@ spec:
         valueFrom:
           fieldRef:
             fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       envFrom:
       - configMapRef:
           name: metadata-grpc-configmap
@@ -148,6 +177,9 @@ spec:
       volumeMounts:
       - mountPath: /kfp-launcher
         name: kfp-launcher
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
       - mountPath: /gcs
         name: gcs-scratch
       - mountPath: /s3
@@ -187,6 +219,13 @@ spec:
     volumes:
     - emptyDir: {}
       name: kfp-launcher
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
     - emptyDir: {}
       name: gcs-scratch
     - emptyDir: {}
@@ -211,8 +250,10 @@ spec:
             value: '{{workflow.parameters.implementations-968dd0fe10602246a7925b6c93838c43ef3e0773013d8f5c8cc15c5d7c6e1179}}'
           - name: task-name
             value: print-op
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         name: print-op-driver
         template: system-container-driver
       - arguments:
@@ -222,12 +263,15 @@ spec:
           - default: "false"
             name: cached-decision
             value: '{{tasks.print-op-driver.outputs.parameters.cached-decision}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         depends: print-op-driver.Succeeded
         name: print-op
         template: system-container-executor
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
+      - name: iteration-index
     metadata: {}
     name: comp-for-loop-2
     outputs: {}
@@ -243,8 +287,8 @@ spec:
       - '{{workflow.name}}'
       - --run_display_name
       - ""
-      - --dag_execution_id
-      - '{{inputs.parameters.parent-dag-id}}'
+      - --parent_task_id
+      - '{{inputs.parameters.parent-dag-task-id}}'
       - --component
       - '{{inputs.parameters.component}}'
       - --task
@@ -255,8 +299,8 @@ spec:
       - '{{inputs.parameters.runtime-config}}'
       - --iteration_index
       - '{{inputs.parameters.iteration-index}}'
-      - --execution_id_path
-      - '{{outputs.parameters.execution-id.path}}'
+      - --parent_task_id_path
+      - '{{outputs.parameters.parent-dag-task-id-path.path}}'
       - --iteration_count_path
       - '{{outputs.parameters.iteration-count.path}}'
       - --condition_path
@@ -269,6 +313,19 @@ spec:
       - ""
       command:
       - driver
+      env:
+      - name: KFP_POD_NAME
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.name
+      - name: KFP_POD_UID
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       image: ghcr.io/kubeflow/kfp-driver:latest
       name: ""
      resources:
@@ -278,6 +335,10 @@ spec:
         requests:
           cpu: 100m
           memory: 64Mi
+      volumeMounts:
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
     inputs:
       parameters:
       - name: component
@@ -287,8 +348,8 @@ spec:
         name: task
       - default: ""
         name: task-name
-      - default: "0"
-        name: parent-dag-id
+      - default: ""
+        name: parent-dag-task-id
"-1" name: iteration-index - default: DAG @@ -297,9 +358,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -308,64 +369,43 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-2}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, - 2, 3]"}},"taskInfo":{"name":"for-loop-2"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-2 - inputs: - parameters: - - name: parent-dag-id - - name: iteration-index - metadata: {} - name: comp-for-loop-2-iteration - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-2}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, 2, 3]"}},"taskInfo":{"name":"for-loop-2"}}' + - name: task-name + value: for-loop-2 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-2-iteration + template: comp-for-loop-2 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-2-for-loop-2-iterator outputs: {} @@ -373,8 +413,8 @@ spec: tasks: - arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: for-loop-2 template: comp-for-loop-2-for-loop-2-iterator - arguments: @@ -387,8 +427,8 @@ spec: value: '{{workflow.parameters.implementations-968dd0fe10602246a7925b6c93838c43ef3e0773013d8f5c8cc15c5d7c6e1179}}' - name: task-name value: print-op-2 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: for-loop-2.Succeeded name: print-op-2-driver template: system-container-driver @@ -412,8 +452,8 @@ spec: value: '{{workflow.parameters.implementations-968dd0fe10602246a7925b6c93838c43ef3e0773013d8f5c8cc15c5d7c6e1179}}' 
           - name: task-name
             value: print-op-3
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         depends: for-loop-2.Succeeded
         name: print-op-3-driver
         template: system-container-driver
@@ -429,7 +469,7 @@ spec:
         template: system-container-executor
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
     metadata: {}
     name: root
     outputs: {}
@@ -447,8 +487,8 @@ spec:
         template: system-dag-driver
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{tasks.root-driver.outputs.parameters.execution-id}}'
+          - name: parent-dag-task-id
+            value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}'
           - name: condition
             value: ""
         depends: root-driver.Succeeded
diff --git a/test_data/compiled-workflows/parameter_cache.yaml b/test_data/compiled-workflows/parameter_cache.yaml
index 661ea64fa19..35239b901b8 100644
--- a/test_data/compiled-workflows/parameter_cache.yaml
+++ b/test_data/compiled-workflows/parameter_cache.yaml
@@ -58,8 +58,8 @@ spec:
       - '{{workflow.name}}'
       - --run_display_name
       - ""
-      - --dag_execution_id
-      - '{{inputs.parameters.parent-dag-id}}'
+      - --parent_task_id
+      - '{{inputs.parameters.parent-dag-task-id}}'
       - --component
       - '{{inputs.parameters.component}}'
       - --task
@@ -86,6 +86,19 @@ spec:
       - ""
       command:
       - driver
+      env:
+      - name: KFP_POD_NAME
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.name
+      - name: KFP_POD_UID
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       image: ghcr.io/kubeflow/kfp-driver:latest
       name: ""
       resources:
@@ -95,13 +108,17 @@ spec:
         requests:
           cpu: 100m
           memory: 64Mi
+      volumeMounts:
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
     inputs:
       parameters:
       - name: component
       - name: task
       - name: container
       - name: task-name
-      - name: parent-dag-id
+      - name: parent-dag-task-id
       - default: "-1"
         name: iteration-index
       - default: ""
@@ -123,6 +140,14 @@ spec:
         valueFrom:
           default: "true"
           path: /tmp/outputs/condition
+    volumes:
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
   - dag:
       tasks:
       - arguments:
@@ -152,6 +177,10 @@ spec:
         valueFrom:
           fieldRef:
             fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       envFrom:
       - configMapRef:
           name: metadata-grpc-configmap
@@ -162,6 +191,9 @@ spec:
       volumeMounts:
       - mountPath: /kfp-launcher
         name: kfp-launcher
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
       - mountPath: /gcs
         name: gcs-scratch
       - mountPath: /s3
@@ -201,6 +233,13 @@ spec:
     volumes:
     - emptyDir: {}
       name: kfp-launcher
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
     - emptyDir: {}
       name: gcs-scratch
     - emptyDir: {}
@@ -225,8 +264,8 @@ spec:
             value: '{{workflow.parameters.implementations-0f16b9e977a5e18f1fdc3e157b0997b82f1f8dc81f77d99e59f429e48800530c}}'
           - name: task-name
             value: core-comp
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         name: core-comp-driver
         template: system-container-driver
       - arguments:
@@ -241,7 +280,7 @@ spec:
         template: system-container-executor
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
     metadata: {}
     name: comp-core
     outputs: {}
@@ -257,8 +296,8 @@ spec:
       - '{{workflow.name}}'
       - --run_display_name
       - ""
-      - --dag_execution_id
-      - '{{inputs.parameters.parent-dag-id}}'
+      - --parent_task_id
+      - '{{inputs.parameters.parent-dag-task-id}}'
       - --component
       - '{{inputs.parameters.component}}'
       - --task
@@ -269,8 +308,8 @@ spec:
       - '{{inputs.parameters.runtime-config}}'
       - --iteration_index
       - '{{inputs.parameters.iteration-index}}'
-      - --execution_id_path
-      - '{{outputs.parameters.execution-id.path}}'
+      - --parent_task_id_path
+      - '{{outputs.parameters.parent-dag-task-id-path.path}}'
       - --iteration_count_path
       - '{{outputs.parameters.iteration-count.path}}'
       - --condition_path
@@ -283,6 +322,19 @@ spec:
       - ""
       command:
       - driver
+      env:
+      - name: KFP_POD_NAME
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.name
+      - name: KFP_POD_UID
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       image: ghcr.io/kubeflow/kfp-driver:latest
       name: ""
       resources:
@@ -292,6 +344,10 @@ spec:
         requests:
           cpu: 100m
           memory: 64Mi
+      volumeMounts:
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
     inputs:
       parameters:
       - name: component
@@ -301,8 +357,8 @@ spec:
         name: task
       - default: ""
         name: task-name
-      - default: "0"
-        name: parent-dag-id
+      - default: ""
+        name: parent-dag-task-id
       - default: "-1"
         name: iteration-index
       - default: DAG
@@ -311,9 +367,9 @@ spec:
     name: system-dag-driver
     outputs:
       parameters:
-      - name: execution-id
+      - name: parent-dag-task-id-path
         valueFrom:
-          path: /tmp/outputs/execution-id
+          path: /tmp/outputs/task-id
       - name: iteration-count
         valueFrom:
           default: "0"
@@ -322,14 +378,22 @@ spec:
         valueFrom:
           default: "true"
           path: /tmp/outputs/condition
+    volumes:
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
   - dag:
       tasks:
       - arguments:
           parameters:
           - name: component
             value: '{{workflow.parameters.components-comp-core}}'
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
           - name: task
             value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-core"},"taskInfo":{"name":"core"}}'
           - name: task-name
@@ -338,8 +402,8 @@ spec:
         template: system-dag-driver
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{tasks.core-driver.outputs.parameters.execution-id}}'
+          - name: parent-dag-task-id
+            value: '{{tasks.core-driver.outputs.parameters.parent-dag-task-id-path}}'
           - name: condition
             value: '{{tasks.core-driver.outputs.parameters.condition}}'
         depends: core-driver.Succeeded
@@ -347,7 +411,7 @@ spec:
         template: comp-core
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
     metadata: {}
     name: comp-mantle
     outputs: {}
@@ -363,8 +427,8 @@ spec:
            value: '{{workflow.parameters.implementations-e25bc8f7f685ab080a6b49947d416eb8ca7a6937fa27801a940c2bdca54fc786}}'
           - name: task-name
             value: crust-comp
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         depends: mantle.Succeeded
         name: crust-comp-driver
         template: system-container-driver
@@ -382,8 +446,8 @@ spec:
           parameters:
           - name: component
             value: '{{workflow.parameters.components-comp-mantle}}'
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
           - name: task
'{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-mantle"},"taskInfo":{"name":"mantle"}}' - name: task-name @@ -392,8 +456,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.mantle-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.mantle-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.mantle-driver.outputs.parameters.condition}}' depends: mantle-driver.Succeeded @@ -401,7 +465,7 @@ spec: template: comp-mantle inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -419,8 +483,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/parameter_oneof.yaml b/test_data/compiled-workflows/parameter_oneof.yaml index ec02a4cf23e..868b82239b1 100644 --- a/test_data/compiled-workflows/parameter_oneof.yaml +++ b/test_data/compiled-workflows/parameter_oneof.yaml @@ -97,8 +97,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -125,6 +125,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -134,13 +147,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -162,6 +179,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -191,6 +216,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -201,6 +230,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -240,6 +272,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -265,8 +304,8 @@ spec: value: '{{workflow.parameters.implementations-f8f8a77c7beb5b190f9fb182fe0e0dd40b3a7381615f2c349f0d398819f532c3}}' - name: task-name value: core-comp - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: core-comp-driver 
template: system-container-driver - arguments: @@ -281,7 +320,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-2 outputs: {} @@ -298,8 +337,8 @@ spec: value: '{{workflow.parameters.implementations-c2e197395954c8d0af830daca22f180b3ccec8198dd5795ab265dec857a88316}}' - name: task-name value: core-output-comp - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: core-output-comp-driver template: system-container-driver - arguments: @@ -314,7 +353,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-3 outputs: {} @@ -330,8 +369,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -342,8 +381,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -356,6 +395,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -365,6 +417,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -374,8 +430,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -384,9 +440,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -395,14 +451,22 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-2}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-2"},"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"componentInputParameter":"pipelinechannel--flip-coin-Output"}}},"taskInfo":{"name":"condition-2"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-coin-Output''] == ''heads''"}}' @@ -412,8 +476,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-2-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: 
'{{tasks.condition-2-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-2-driver.outputs.parameters.condition}}' depends: condition-2-driver.Succeeded @@ -424,8 +488,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-3}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-3"},"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"componentInputParameter":"pipelinechannel--flip-coin-Output"}}},"taskInfo":{"name":"condition-3"},"triggerPolicy":{"condition":"!(inputs.parameter_values[''pipelinechannel--flip-coin-Output''] == ''heads'')"}}' @@ -435,8 +499,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-3-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-3-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-3-driver.outputs.parameters.condition}}' depends: condition-3-driver.Succeeded @@ -445,7 +509,7 @@ spec: when: '{{tasks.condition-3-driver.outputs.parameters.condition}} != false' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-branches-1 outputs: {} @@ -455,8 +519,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-branches-1}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-branches-1"},"dependentTasks":["flip-coin"],"inputs":{"parameters":{"pipelinechannel--flip-coin-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin"}}}},"taskInfo":{"name":"condition-branches-1"}}' - name: task-name @@ -466,8 +530,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-branches-1-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-branches-1-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-branches-1-driver.outputs.parameters.condition}}' depends: condition-branches-1-driver.Succeeded @@ -483,8 +547,8 @@ spec: value: '{{workflow.parameters.implementations-b7bc282093d4c682a88eee9264eed07197d2e4dc5276c37ed07b353ff7e961aa}}' - name: task-name value: flip-coin - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: flip-coin-driver template: system-container-driver - arguments: @@ -499,7 +563,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-core outputs: {} @@ -509,8 +573,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-core}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"cachingOptions":{},"componentRef":{"name":"comp-core"},"taskInfo":{"name":"core"}}' - name: task-name @@ -519,8 +583,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: 
'{{tasks.core-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.core-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.core-driver.outputs.parameters.condition}}' depends: core-driver.Succeeded @@ -528,7 +592,7 @@ spec: template: comp-core inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-mantle outputs: {} @@ -544,8 +608,8 @@ spec: value: '{{workflow.parameters.implementations-e25bc8f7f685ab080a6b49947d416eb8ca7a6937fa27801a940c2bdca54fc786}}' - name: task-name value: crust-comp - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: mantle.Succeeded name: crust-comp-driver template: system-container-driver @@ -563,8 +627,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-mantle}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"cachingOptions":{},"componentRef":{"name":"comp-mantle"},"taskInfo":{"name":"mantle"}}' - name: task-name @@ -573,8 +637,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.mantle-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.mantle-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.mantle-driver.outputs.parameters.condition}}' depends: mantle-driver.Succeeded @@ -582,7 +646,7 @@ spec: template: comp-mantle inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -600,8 +664,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/parameters_complex.yaml b/test_data/compiled-workflows/parameters_complex.yaml index 05f09dedbc2..db3834da757 100644 --- a/test_data/compiled-workflows/parameters_complex.yaml +++ b/test_data/compiled-workflows/parameters_complex.yaml @@ -91,8 +91,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -119,6 +119,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -128,13 +141,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -156,6 +173,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: 
pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -185,6 +210,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -195,6 +224,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -234,6 +266,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -258,8 +297,10 @@ spec: value: '{{workflow.parameters.implementations-f79f58ed4b630aadad448078027429b1d8053e45ed52bed5961317a8e3ebf5e3}}' - name: task-name value: double-2 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: double-2-driver template: system-container-driver - arguments: @@ -269,12 +310,15 @@ spec: - default: "false" name: cached-decision value: '{{tasks.double-2-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: double-2-driver.Succeeded name: double-2 template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id + - name: iteration-index metadata: {} name: comp-for-loop-4 outputs: {} @@ -290,8 +334,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -302,8 +346,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -316,6 +360,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -325,6 +382,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -334,8 +395,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -344,9 +405,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -355,64 +416,43 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + 
expirationSeconds: 7200 + path: token - dag: tasks: - arguments: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-4}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-4"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[4, - 5, 6]"}},"taskInfo":{"name":"for-loop-4"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-4 - inputs: - parameters: - - name: parent-dag-id - - name: iteration-index - metadata: {} - name: comp-for-loop-4-iteration - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-4}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-for-loop-4"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[4, 5, 6]"}},"taskInfo":{"name":"for-loop-4"}}' + - name: task-name + value: for-loop-4 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-4-iteration + template: comp-for-loop-4 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-4-for-loop-4-iterator outputs: {} @@ -428,8 +468,10 @@ spec: value: '{{workflow.parameters.implementations-f79f58ed4b630aadad448078027429b1d8053e45ed52bed5961317a8e3ebf5e3}}' - name: task-name value: double - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: double-driver template: system-container-driver - arguments: @@ -439,13 +481,17 @@ spec: - default: "false" name: cached-decision value: '{{tasks.double-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: double-driver.Succeeded name: double template: system-container-executor - arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: for-loop-4 template: comp-for-loop-4-for-loop-4-iterator - arguments: @@ -458,8 +504,10 @@ spec: value: '{{workflow.parameters.implementations-8f8f327236d0dee95a8f9e3066a3489084583a3ea55cfbc385dc232fa4205cc3}}' - name: task-name value: simple-add - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - 
name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: for-loop-4.Succeeded name: simple-add-driver template: system-container-driver @@ -470,12 +518,15 @@ spec: - default: "false" name: cached-decision value: '{{tasks.simple-add-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: simple-add-driver.Succeeded name: simple-add template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id + - name: iteration-index metadata: {} name: comp-for-loop-2 outputs: {} @@ -491,8 +542,10 @@ spec: value: '{{workflow.parameters.implementations-85ab7701a46287a3152375690acb08e729df2da42b3ddf49938ef1f9be7e8fb4}}' - name: task-name value: nested-add-2 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: nested-add-2-driver template: system-container-driver - arguments: @@ -502,6 +555,8 @@ spec: - default: "false" name: cached-decision value: '{{tasks.nested-add-2-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: nested-add-2-driver.Succeeded name: nested-add-2 template: system-container-executor @@ -515,8 +570,10 @@ spec: value: '{{workflow.parameters.implementations-8f8f327236d0dee95a8f9e3066a3489084583a3ea55cfbc385dc232fa4205cc3}}' - name: task-name value: simple-add-2 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: simple-add-2-driver template: system-container-driver - arguments: @@ -526,45 +583,17 @@ spec: - default: "false" name: cached-decision value: '{{tasks.simple-add-2-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: simple-add-2-driver.Succeeded name: simple-add-2 template: system-container-executor inputs: parameters: - - name: parent-dag-id - metadata: {} - name: comp-for-loop-6 - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-2}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, - 2, 3]"}},"taskInfo":{"name":"for-loop-2"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-2 - inputs: - parameters: - - name: parent-dag-id + - name: parent-dag-task-id - name: iteration-index metadata: {} - name: comp-for-loop-2-iteration + name: comp-for-loop-6 outputs: {} - dag: tasks: @@ -572,27 +601,29 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-2}}' - - name: 
parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, 2, 3]"}},"taskInfo":{"name":"for-loop-2"}}' + - name: task-name + value: for-loop-2 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-2-iteration + template: comp-for-loop-2 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-2-for-loop-2-iterator outputs: {} @@ -602,58 +633,29 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-6}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-6"},"dependentTasks":["for-loop-2"],"inputs":{"parameters":{"pipelinechannel--for-loop-2-pipelinechannel--double-2-Output":{"taskOutputParameter":{"outputParameterKey":"pipelinechannel--double-2-Output","producerTask":"for-loop-2"}},"pipelinechannel--for-loop-2-pipelinechannel--double-Output":{"taskOutputParameter":{"outputParameterKey":"pipelinechannel--double-Output","producerTask":"for-loop-2"}}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-5","items":{"raw":"[0, - 0, 0]"}},"taskInfo":{"name":"for-loop-6"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-6 - inputs: - parameters: - - name: parent-dag-id - - name: iteration-index - metadata: {} - name: comp-for-loop-6-iteration - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-6}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-for-loop-6"},"dependentTasks":["for-loop-2"],"inputs":{"parameters":{"pipelinechannel--for-loop-2-pipelinechannel--double-2-Output":{"taskOutputParameter":{"outputParameterKey":"pipelinechannel--double-2-Output","producerTask":"for-loop-2"}},"pipelinechannel--for-loop-2-pipelinechannel--double-Output":{"taskOutputParameter":{"outputParameterKey":"pipelinechannel--double-Output","producerTask":"for-loop-2"}}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-5","items":{"raw":"[0, 0, 0]"}},"taskInfo":{"name":"for-loop-6"}}' + - name: task-name + value: for-loop-6 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + 
value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-6-iteration + template: comp-for-loop-6 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-6-for-loop-6-iterator outputs: {} @@ -669,8 +671,8 @@ spec: value: '{{workflow.parameters.implementations-5983895df78feaa3fc4a33110d0aed33ca16915ec107be6f49c07e9c4c738d16}}' - name: task-name value: add-two-numbers - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: for-loop-6.Succeeded name: add-two-numbers-driver template: system-container-driver @@ -686,14 +688,14 @@ spec: template: system-container-executor - arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: for-loop-2 template: comp-for-loop-2-for-loop-2-iterator - arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: for-loop-2.Succeeded name: for-loop-6 template: comp-for-loop-6-for-loop-6-iterator @@ -707,8 +709,8 @@ spec: value: '{{workflow.parameters.implementations-85ab7701a46287a3152375690acb08e729df2da42b3ddf49938ef1f9be7e8fb4}}' - name: task-name value: nested-add - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: for-loop-2.Succeeded name: nested-add-driver template: system-container-driver @@ -724,7 +726,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -742,8 +744,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/parameters_simple.yaml b/test_data/compiled-workflows/parameters_simple.yaml index 811d1beb22e..2899029ec4a 100644 --- a/test_data/compiled-workflows/parameters_simple.yaml +++ b/test_data/compiled-workflows/parameters_simple.yaml @@ -62,8 +62,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -90,6 +90,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -99,13 +112,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: 
parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -127,6 +144,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -156,6 +181,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -166,6 +195,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -205,6 +237,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -229,8 +268,10 @@ spec: value: '{{workflow.parameters.implementations-1d9b313d7b75f93cb760d1013066d50f1f444a9ec0e72b829a76934e3cd0d765}}' - name: task-name value: double - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: double-driver template: system-container-driver - arguments: @@ -240,12 +281,15 @@ spec: - default: "false" name: cached-decision value: '{{tasks.double-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: double-driver.Succeeded name: double template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id + - name: iteration-index metadata: {} name: comp-for-loop-2 outputs: {} @@ -261,8 +305,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -273,8 +317,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -287,6 +331,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -296,6 +353,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -305,8 +366,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -315,9 +376,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count 
valueFrom: default: "0" @@ -326,64 +387,43 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-2}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, - 2, 3]"}},"taskInfo":{"name":"for-loop-2"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-2 - inputs: - parameters: - - name: parent-dag-id - - name: iteration-index - metadata: {} - name: comp-for-loop-2-iteration - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-2}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, 2, 3]"}},"taskInfo":{"name":"for-loop-2"}}' + - name: task-name + value: for-loop-2 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-2-iteration + template: comp-for-loop-2 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-2-for-loop-2-iterator outputs: {} @@ -399,8 +439,8 @@ spec: value: '{{workflow.parameters.implementations-82d02acf69c2c3c832eb2f0d142beca2732b4d1c79b2cafacf4123407f5477ad}}' - name: task-name value: add - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: for-loop-2.Succeeded name: add-driver template: system-container-driver @@ -424,8 +464,8 @@ spec: value: '{{workflow.parameters.implementations-8bcf31b4cf23a7983bd03c19572bd597bc45d28e75853f331f473d2a615331ff}}' - name: task-name value: add-container - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: for-loop-2.Succeeded name: add-container-driver template: system-container-driver @@ -441,13 +481,13 @@ spec: template: system-container-executor - arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: for-loop-2 template: 
comp-for-loop-2-for-loop-2-iterator inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -465,8 +505,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/pipeline_as_exit_task.yaml b/test_data/compiled-workflows/pipeline_as_exit_task.yaml index 9a8f53e826a..b046ba0bf56 100644 --- a/test_data/compiled-workflows/pipeline_as_exit_task.yaml +++ b/test_data/compiled-workflows/pipeline_as_exit_task.yaml @@ -78,8 +78,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -106,6 +106,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -115,13 +128,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -143,6 +160,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -172,6 +197,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -182,6 +211,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -221,6 +253,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -246,8 +285,8 @@ spec: value: '{{workflow.parameters.implementations-af17df745243f885d9854f1931988534c66b26d6c60c5279e354d6b3480dceb8}}' - name: task-name value: print-op - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-op-driver template: system-container-driver - arguments: @@ -262,7 +301,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-1 outputs: {} @@ -278,8 +317,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -290,8 +329,8 @@ 
spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -304,6 +343,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -313,6 +365,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -322,8 +378,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -332,9 +388,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -343,14 +399,22 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-1}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-1"},"dependentTasks":["get-run-state"],"inputs":{"parameters":{"pipelinechannel--get-run-state-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"get-run-state"}}}},"taskInfo":{"name":"condition-1"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--get-run-state-Output''] == ''FAILED''"}}' @@ -361,8 +425,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-1-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-1-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-1-driver.outputs.parameters.condition}}' depends: condition-1-driver.Succeeded @@ -379,8 +443,8 @@ spec: value: '{{workflow.parameters.implementations-c7345e051c66d30bc0fcd4558f91a01615e2e208dba34ad8d58d58c265056aa8}}' - name: task-name value: get-run-state - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: get-run-state-driver template: system-container-driver - arguments: @@ -395,7 +459,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-conditional-notification outputs: {} @@ -412,8 +476,8 @@ spec: value: '{{workflow.parameters.implementations-0e20296d1c94dd957c12f688261536d608c0a9430b71b2c67c664244a11161e8}}' - name: task-name value: fail-op - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + 
value: '{{inputs.parameters.parent-dag-task-id}}' name: fail-op-driver template: system-container-driver - arguments: @@ -436,8 +500,8 @@ spec: value: '{{workflow.parameters.implementations-af17df745243f885d9854f1931988534c66b26d6c60c5279e354d6b3480dceb8}}' - name: task-name value: print-op - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-op-driver template: system-container-driver - arguments: @@ -452,7 +516,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-exit-handler-1 outputs: {} @@ -462,8 +526,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-conditional-notification}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-conditional-notification"},"dependentTasks":["exit-handler-1"],"inputs":{"parameters":{"status":{"taskFinalStatus":{"producerTask":"exit-handler-1"}}}},"taskInfo":{"name":"conditional-notification"},"triggerPolicy":{"strategy":"ALL_UPSTREAM_TASKS_COMPLETED"}}' - name: task-name @@ -472,8 +536,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.conditional-notification-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.conditional-notification-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.conditional-notification-driver.outputs.parameters.condition}}' depends: conditional-notification-driver.Succeeded @@ -481,7 +545,7 @@ spec: template: comp-conditional-notification inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: exit-hook-root-conditional-notification outputs: {} @@ -491,8 +555,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-exit-handler-1}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-exit-handler-1"},"inputs":{"parameters":{"pipelinechannel--message":{"componentInputParameter":"message"}}},"taskInfo":{"name":"my-pipeline"}}' - name: task-name @@ -501,8 +565,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.exit-handler-1-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.exit-handler-1-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.exit-handler-1-driver.outputs.parameters.condition}}' depends: exit-handler-1-driver.Succeeded @@ -510,14 +574,14 @@ spec: exit: arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' template: exit-hook-root-conditional-notification name: exit-handler-1 template: comp-exit-handler-1 inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -535,8 +599,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: 
'{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/pipeline_in_pipeline.yaml b/test_data/compiled-workflows/pipeline_in_pipeline.yaml index 9102347c308..49a03703a47 100644 --- a/test_data/compiled-workflows/pipeline_in_pipeline.yaml +++ b/test_data/compiled-workflows/pipeline_in_pipeline.yaml @@ -47,8 +47,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -75,6 +75,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -84,13 +97,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -112,6 +129,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -141,6 +166,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -151,6 +180,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -190,6 +222,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -214,8 +253,8 @@ spec: value: '{{workflow.parameters.implementations-c4774190f368dac0d3d13c5e2481b325cb0256069205b909a0a2385615a61216}}' - name: task-name value: print-op1 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-op1-driver template: system-container-driver - arguments: @@ -238,8 +277,8 @@ spec: value: '{{workflow.parameters.implementations-f6ead56828d6931739aa4610b58f9697c110a2d8723f04e95256df73a90b2348}}' - name: task-name value: print-op2 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: print-op1.Succeeded name: print-op2-driver template: system-container-driver @@ -255,7 +294,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-inner-pipeline outputs: {} @@ -271,8 +310,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - 
'{{inputs.parameters.component}}' - --task @@ -283,8 +322,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -297,6 +336,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -306,6 +358,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -315,8 +371,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -325,9 +381,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -336,14 +392,22 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: parameters: - name: component value: '{{workflow.parameters.components-comp-inner-pipeline}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-inner-pipeline"},"inputs":{"parameters":{"msg":{"runtimeValue":{"constant":"world"}}}},"taskInfo":{"name":"inner-pipeline"}}' - name: task-name @@ -352,8 +416,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.inner-pipeline-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.inner-pipeline-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.inner-pipeline-driver.outputs.parameters.condition}}' depends: inner-pipeline-driver.Succeeded @@ -369,8 +433,8 @@ spec: value: '{{workflow.parameters.implementations-c4774190f368dac0d3d13c5e2481b325cb0256069205b909a0a2385615a61216}}' - name: task-name value: print-op1 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-op1-driver template: system-container-driver - arguments: @@ -385,7 +449,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -403,8 +467,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git 
a/test_data/compiled-workflows/pipeline_in_pipeline_complex.yaml b/test_data/compiled-workflows/pipeline_in_pipeline_complex.yaml index ab28fb406c9..becb3db5b5c 100644 --- a/test_data/compiled-workflows/pipeline_in_pipeline_complex.yaml +++ b/test_data/compiled-workflows/pipeline_in_pipeline_complex.yaml @@ -56,8 +56,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -84,6 +84,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -93,13 +106,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -121,6 +138,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -150,6 +175,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -160,6 +189,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -199,6 +231,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -223,8 +262,8 @@ spec: value: '{{workflow.parameters.implementations-f6ead56828d6931739aa4610b58f9697c110a2d8723f04e95256df73a90b2348}}' - name: task-name value: print-op2 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-op2-driver template: system-container-driver - arguments: @@ -239,7 +278,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-1 outputs: {} @@ -255,8 +294,8 @@ spec: value: '{{workflow.parameters.implementations-f6ead56828d6931739aa4610b58f9697c110a2d8723f04e95256df73a90b2348}}' - name: task-name value: print-op - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-op-driver template: system-container-driver - arguments: @@ -271,7 +310,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-2 outputs: {} @@ -287,8 +326,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - 
'{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -299,8 +338,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -313,6 +352,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -322,6 +374,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -331,8 +387,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -341,9 +397,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -352,14 +408,22 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-1}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-1"},"dependentTasks":["print-op1"],"inputs":{"parameters":{"pipelinechannel--print-op1-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"print-op1"}}}},"taskInfo":{"name":"condition-1"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--print-op1-Output''] == ''Hello''"}}' @@ -370,8 +434,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-1-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-1-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-1-driver.outputs.parameters.condition}}' depends: condition-1-driver.Succeeded @@ -382,8 +446,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-2}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-2"},"dependentTasks":["print-op1"],"inputs":{"parameters":{"pipelinechannel--print-op1-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"print-op1"}}}},"taskInfo":{"name":"condition-2"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--print-op1-Output''] != ''Hello''"}}' @@ -394,8 +458,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: 
'{{tasks.condition-2-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-2-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-2-driver.outputs.parameters.condition}}' depends: condition-2-driver.Succeeded @@ -412,8 +476,8 @@ spec: value: '{{workflow.parameters.implementations-c4774190f368dac0d3d13c5e2481b325cb0256069205b909a0a2385615a61216}}' - name: task-name value: print-op1 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-op1-driver template: system-container-driver - arguments: @@ -428,7 +492,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-inner-pipeline outputs: {} @@ -438,59 +502,33 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-inner-pipeline}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-inner-pipeline"},"inputs":{"parameters":{"msg":{"componentInputParameter":"pipelinechannel--loop-item-param-1"}}},"taskInfo":{"name":"inner-pipeline"}}' - name: task-name value: inner-pipeline + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: inner-pipeline-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.inner-pipeline-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.inner-pipeline-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.inner-pipeline-driver.outputs.parameters.condition}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: inner-pipeline-driver.Succeeded name: inner-pipeline template: comp-inner-pipeline inputs: parameters: - - name: parent-dag-id - metadata: {} - name: comp-for-loop-2 - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-2}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[\"Hello\", - \"world!\"]"}},"taskInfo":{"name":"for-loop-2"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-2 - inputs: - parameters: - - name: parent-dag-id + - name: parent-dag-task-id - name: iteration-index metadata: {} - name: comp-for-loop-2-iteration + name: comp-for-loop-2 outputs: {} - dag: tasks: @@ -498,27 +536,29 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-2}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: 
'{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[\"Hello\", \"world!\"]"}},"taskInfo":{"name":"for-loop-2"}}' + - name: task-name + value: for-loop-2 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-2-iteration + template: comp-for-loop-2 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-2-for-loop-2-iterator outputs: {} @@ -526,8 +566,8 @@ spec: tasks: - arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: for-loop-2 template: comp-for-loop-2-for-loop-2-iterator - arguments: @@ -540,8 +580,8 @@ spec: value: '{{workflow.parameters.implementations-c4774190f368dac0d3d13c5e2481b325cb0256069205b909a0a2385615a61216}}' - name: task-name value: print-op1 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-op1-driver template: system-container-driver - arguments: @@ -556,7 +596,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -574,8 +614,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/pipeline_in_pipeline_loaded_from_yaml.yaml b/test_data/compiled-workflows/pipeline_in_pipeline_loaded_from_yaml.yaml index 5d9d8a8929b..5ee2bd65d8d 100644 --- a/test_data/compiled-workflows/pipeline_in_pipeline_loaded_from_yaml.yaml +++ b/test_data/compiled-workflows/pipeline_in_pipeline_loaded_from_yaml.yaml @@ -63,8 +63,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -91,6 +91,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -100,13 +113,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -128,6 +145,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - 
serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -157,6 +182,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -167,6 +196,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -206,6 +238,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -230,8 +269,8 @@ spec: value: '{{workflow.parameters.implementations-c4774190f368dac0d3d13c5e2481b325cb0256069205b909a0a2385615a61216}}' - name: task-name value: print-op1 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-op1-driver template: system-container-driver - arguments: @@ -254,8 +293,8 @@ spec: value: '{{workflow.parameters.implementations-3f2ba2471e2638f8b27a01f4563316e6e3e8c02bae402875e95b5b86e8de1ddc}}' - name: task-name value: print-op2 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: print-op1.Succeeded name: print-op2-driver template: system-container-driver @@ -271,7 +310,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-inner-pipeline outputs: {} @@ -287,8 +326,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -299,8 +338,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -313,6 +352,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -322,6 +374,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -331,8 +387,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -341,9 +397,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -352,14 +408,22 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: 
kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: parameters: - name: component value: '{{workflow.parameters.components-comp-inner-pipeline}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-inner-pipeline"},"inputs":{"parameters":{"msg":{"runtimeValue":{"constant":"world"}}}},"taskInfo":{"name":"inner-pipeline"}}' - name: task-name @@ -368,8 +432,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.inner-pipeline-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.inner-pipeline-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.inner-pipeline-driver.outputs.parameters.condition}}' depends: inner-pipeline-driver.Succeeded @@ -385,8 +449,8 @@ spec: value: '{{workflow.parameters.implementations-c4774190f368dac0d3d13c5e2481b325cb0256069205b909a0a2385615a61216}}' - name: task-name value: print-op1 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-op1-driver template: system-container-driver - arguments: @@ -401,7 +465,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-pipeline-in-pipeline outputs: {} @@ -411,8 +475,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-pipeline-in-pipeline}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-pipeline-in-pipeline"},"inputs":{"parameters":{"msg":{"runtimeValue":{"constant":"Hello"}}}},"taskInfo":{"name":"pipeline-in-pipeline"}}' - name: task-name @@ -421,8 +485,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.pipeline-in-pipeline-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.pipeline-in-pipeline-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.pipeline-in-pipeline-driver.outputs.parameters.condition}}' depends: pipeline-in-pipeline-driver.Succeeded @@ -438,8 +502,8 @@ spec: value: '{{workflow.parameters.implementations-d86fa51b3af1d2cdca561c7f7cdf7534896739e113579b081fa96e6c40e113c1}}' - name: task-name value: print-op1 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: pipeline-in-pipeline.Succeeded name: print-op1-driver template: system-container-driver @@ -455,7 +519,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -473,8 +537,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: 
root-driver.Succeeded diff --git a/test_data/compiled-workflows/pipeline_producer_consumer.yaml b/test_data/compiled-workflows/pipeline_producer_consumer.yaml index b282a4691cc..4ba37d7f1a2 100644 --- a/test_data/compiled-workflows/pipeline_producer_consumer.yaml +++ b/test_data/compiled-workflows/pipeline_producer_consumer.yaml @@ -94,8 +94,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -122,6 +122,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -131,13 +144,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -159,6 +176,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -188,6 +213,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -198,6 +227,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -237,6 +269,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -261,8 +300,10 @@ spec: value: '{{workflow.parameters.implementations-5c5f855eb8bb4260342c9f40e736b35407a28d55966e30615d053e6a28f07fed}}' - name: task-name value: echo-and-return - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: echo-and-return-driver template: system-container-driver - arguments: @@ -272,12 +313,15 @@ spec: - default: "false" name: cached-decision value: '{{tasks.echo-and-return-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: echo-and-return-driver.Succeeded name: echo-and-return template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id + - name: iteration-index metadata: {} name: comp-for-loop-2-2 outputs: {} @@ -293,8 +337,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -305,8 +349,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - 
--iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -319,6 +363,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -328,6 +385,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -337,8 +398,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -347,9 +408,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -358,64 +419,43 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-2-2}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-2-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[\"m\", - \"a\", \"t\", \"h\"]"}},"taskInfo":{"name":"for-loop-2"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-2-2 - inputs: - parameters: - - name: parent-dag-id - - name: iteration-index - metadata: {} - name: comp-for-loop-2-2-iteration - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-2-2}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-for-loop-2-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[\"m\", \"a\", \"t\", \"h\"]"}},"taskInfo":{"name":"for-loop-2"}}' + - name: task-name + value: for-loop-2 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-2-2-iteration + template: comp-for-loop-2-2 
withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-2-2-for-loop-2-iterator outputs: {} @@ -431,8 +471,8 @@ spec: value: '{{workflow.parameters.implementations-1be5f8f8f37d8780d2254662338636e64bb02c2cd31f4099b84c292a06f04cd3}}' - name: task-name value: add - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: add-driver template: system-container-driver - arguments: @@ -447,13 +487,13 @@ spec: template: system-container-executor - arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: for-loop-2 template: comp-for-loop-2-2-for-loop-2-iterator inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-add-pipeline outputs: {} @@ -469,8 +509,8 @@ spec: value: '{{workflow.parameters.implementations-f79f58ed4b630aadad448078027429b1d8053e45ed52bed5961317a8e3ebf5e3}}' - name: task-name value: double - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: double-driver template: system-container-driver - arguments: @@ -485,7 +525,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-double-pipeline outputs: {} @@ -495,59 +535,33 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-double-pipeline}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-double-pipeline"},"inputs":{"parameters":{"num":{"componentInputParameter":"pipelinechannel--loop-item-param-3"}}},"taskInfo":{"name":"double-pipeline"}}' - name: task-name value: double-pipeline + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: double-pipeline-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.double-pipeline-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.double-pipeline-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.double-pipeline-driver.outputs.parameters.condition}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: double-pipeline-driver.Succeeded name: double-pipeline template: comp-double-pipeline inputs: parameters: - - name: parent-dag-id - metadata: {} - name: comp-for-loop-4 - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-4}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-4"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[1, - 2, 3]"}},"taskInfo":{"name":"for-loop-4"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: 
'{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-4 - inputs: - parameters: - - name: parent-dag-id + - name: parent-dag-task-id - name: iteration-index metadata: {} - name: comp-for-loop-4-iteration + name: comp-for-loop-4 outputs: {} - dag: tasks: @@ -555,27 +569,29 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-4}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-for-loop-4"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[1, 2, 3]"}},"taskInfo":{"name":"for-loop-4"}}' + - name: task-name + value: for-loop-4 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-4-iteration + template: comp-for-loop-4 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-4-for-loop-4-iterator outputs: {} @@ -583,46 +599,18 @@ spec: tasks: - arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - name: for-loop-4 - template: comp-for-loop-4-for-loop-4-iterator - inputs: - parameters: - - name: parent-dag-id - metadata: {} - name: comp-for-loop-2 - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-2}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: iteration-index value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, - 2, 3]"}},"taskInfo":{"name":"for-loop-2"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-2 + name: for-loop-4 + template: comp-for-loop-4-for-loop-4-iterator inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id - name: iteration-index metadata: {} - name: comp-for-loop-2-iteration + name: comp-for-loop-2 outputs: {} - dag: tasks: @@ -630,27 +618,29 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-2}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, 2, 
3]"}},"taskInfo":{"name":"for-loop-2"}}' + - name: task-name + value: for-loop-2 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-2-iteration + template: comp-for-loop-2 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-2-for-loop-2-iterator outputs: {} @@ -660,8 +650,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-add-pipeline}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-add-pipeline"},"dependentTasks":["for-loop-2"],"inputs":{"parameters":{"nums":{"taskOutputParameter":{"outputParameterKey":"pipelinechannel--double-pipeline-Output","producerTask":"for-loop-2"}}}},"taskInfo":{"name":"add-pipeline"}}' - name: task-name @@ -671,8 +661,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.add-pipeline-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.add-pipeline-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.add-pipeline-driver.outputs.parameters.condition}}' depends: add-pipeline-driver.Succeeded @@ -680,8 +670,8 @@ spec: template: comp-add-pipeline - arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: for-loop-2 template: comp-for-loop-2-for-loop-2-iterator - arguments: @@ -694,8 +684,8 @@ spec: value: '{{workflow.parameters.implementations-62336025dcddfbc82e6c78051d9dbd9253225e4f399ded7fb486aabb7c96645a}}' - name: task-name value: join-and-print - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: add-pipeline.Succeeded name: join-and-print-driver template: system-container-driver @@ -711,7 +701,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -729,8 +719,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/pipeline_with_after.yaml b/test_data/compiled-workflows/pipeline_with_after.yaml index 806dc496243..062e2b504ca 100644 --- a/test_data/compiled-workflows/pipeline_with_after.yaml +++ b/test_data/compiled-workflows/pipeline_with_after.yaml @@ -35,8 +35,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' 
- --task @@ -63,6 +63,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -72,13 +85,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -100,6 +117,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -129,6 +154,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -139,6 +168,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -178,6 +210,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -203,8 +242,8 @@ spec: value: '{{workflow.parameters.implementations-e9d96c084390adff2ea878debaa8ca2fd68a3c14c3349e300ea0c5deac669806}}' - name: task-name value: print-text - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-text-driver template: system-container-driver - arguments: @@ -228,8 +267,8 @@ spec: value: '{{workflow.parameters.implementations-e9d96c084390adff2ea878debaa8ca2fd68a3c14c3349e300ea0c5deac669806}}' - name: task-name value: print-text-2 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: print-text.Succeeded name: print-text-2-driver template: system-container-driver @@ -254,8 +293,8 @@ spec: value: '{{workflow.parameters.implementations-e9d96c084390adff2ea878debaa8ca2fd68a3c14c3349e300ea0c5deac669806}}' - name: task-name value: print-text-3 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: print-text.Succeeded && print-text-2.Succeeded name: print-text-3-driver template: system-container-driver @@ -271,7 +310,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -287,8 +326,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -299,8 +338,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - 
'{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -313,6 +352,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -322,6 +374,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -331,8 +387,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -341,9 +397,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -352,6 +408,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -366,8 +430,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/pipeline_with_artifact_custom_path.yaml b/test_data/compiled-workflows/pipeline_with_artifact_custom_path.yaml index 6477f37882e..6ae284dce8e 100644 --- a/test_data/compiled-workflows/pipeline_with_artifact_custom_path.yaml +++ b/test_data/compiled-workflows/pipeline_with_artifact_custom_path.yaml @@ -63,8 +63,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -91,6 +91,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -100,13 +113,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -128,6 +145,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -157,6 +182,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - 
configMapRef: name: metadata-grpc-configmap @@ -167,6 +196,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -206,6 +238,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -230,8 +269,8 @@ spec: value: '{{workflow.parameters.implementations-0692d431e617fbe942c422f90e6bc3e379106797bac2421b5f1889953b36c4bd}}' - name: task-name value: component-output-artifact - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: component-output-artifact-driver template: system-container-driver - arguments: @@ -254,8 +293,8 @@ spec: value: '{{workflow.parameters.implementations-0641839dce68d0225ee497406363513592d841c856891f7313f5368a99e8a50d}}' - name: task-name value: validate-input-artifact - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: component-output-artifact.Succeeded name: validate-input-artifact-driver template: system-container-driver @@ -271,7 +310,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -287,8 +326,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -299,8 +338,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -313,6 +352,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -322,6 +374,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -331,8 +387,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -341,9 +397,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -352,6 +408,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -366,8 +430,8 @@ spec: template: system-dag-driver - 
arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/pipeline_with_artifact_upload_download.yaml b/test_data/compiled-workflows/pipeline_with_artifact_upload_download.yaml index d0c739acf33..a138519f0ec 100644 --- a/test_data/compiled-workflows/pipeline_with_artifact_upload_download.yaml +++ b/test_data/compiled-workflows/pipeline_with_artifact_upload_download.yaml @@ -62,8 +62,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -90,6 +90,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -99,13 +112,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -127,6 +144,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -156,6 +181,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -166,6 +195,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -205,6 +237,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -229,8 +268,8 @@ spec: value: '{{workflow.parameters.implementations-9881acc01330f69427ffe30acddda484b63239f267d7036beb6070353fda316b}}' - name: task-name value: download-dataset-and-upload-as-artifact - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: download-dataset-and-upload-as-artifact-driver template: system-container-driver - arguments: @@ -253,8 +292,8 @@ spec: value: '{{workflow.parameters.implementations-c020e8bb66a4c390268500295dd49b3f2dcf34bea30556dd91446c1696154c08}}' - name: task-name value: print-dataset-info - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: download-dataset-and-upload-as-artifact.Succeeded name: print-dataset-info-driver template: system-container-driver @@ -270,7 +309,7 @@ spec: template: system-container-executor inputs: parameters: 
- - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -286,8 +325,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -298,8 +337,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -312,6 +351,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -321,6 +373,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -330,8 +386,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -340,9 +396,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -351,6 +407,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -365,8 +429,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/pipeline_with_concat_placeholder.yaml b/test_data/compiled-workflows/pipeline_with_concat_placeholder.yaml index f36e331e066..2d06b76e6ca 100644 --- a/test_data/compiled-workflows/pipeline_with_concat_placeholder.yaml +++ b/test_data/compiled-workflows/pipeline_with_concat_placeholder.yaml @@ -34,8 +34,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -62,6 +62,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -71,13 +84,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: 
parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -99,6 +116,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -128,6 +153,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -138,6 +167,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -177,6 +209,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -201,8 +240,8 @@ spec: value: '{{workflow.parameters.implementations-7c45df434efc6e206df6b6c77b2f4f28cdab5c72c61199ad34c83ad5f3ecd460}}' - name: task-name value: component-with-concat-placeholder - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: component-with-concat-placeholder-driver template: system-container-driver - arguments: @@ -217,7 +256,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -233,8 +272,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -245,8 +284,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -259,6 +298,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -268,6 +320,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -277,8 +333,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -287,9 +343,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -298,6 +354,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - 
arguments: @@ -312,8 +376,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/pipeline_with_condition.yaml b/test_data/compiled-workflows/pipeline_with_condition.yaml index 7fc3d76bcd4..e658eb9efec 100644 --- a/test_data/compiled-workflows/pipeline_with_condition.yaml +++ b/test_data/compiled-workflows/pipeline_with_condition.yaml @@ -60,8 +60,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -88,6 +88,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -97,13 +110,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -125,6 +142,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -154,6 +179,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -164,6 +193,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -203,6 +235,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -227,8 +266,8 @@ spec: value: '{{workflow.parameters.implementations-663f234c274873c6b1c76e2f91561ee25169ff505cfe45310e51e4b0265a2d98}}' - name: task-name value: flip-coin-op-2 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: flip-coin-op-2-driver template: system-container-driver - arguments: @@ -251,8 +290,8 @@ spec: value: '{{workflow.parameters.implementations-0a97e2e381526d8572c8a0a776550e66305543ba3a2032eb60de0ef3e42504b5}}' - name: task-name value: print-op-2 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: flip-coin-op-2.Succeeded name: print-op-2-driver template: system-container-driver @@ -276,8 +315,8 @@ spec: value: '{{workflow.parameters.implementations-0a97e2e381526d8572c8a0a776550e66305543ba3a2032eb60de0ef3e42504b5}}' - name: task-name value: 
print-op-3 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-op-3-driver template: system-container-driver - arguments: @@ -292,7 +331,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-1 outputs: {} @@ -308,8 +347,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -320,8 +359,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -334,6 +373,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -343,6 +395,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -352,8 +408,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -362,9 +418,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -373,14 +429,22 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-1}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-1"},"dependentTasks":["flip-coin-op"],"inputs":{"parameters":{"pipelinechannel--flip-coin-op-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin-op"}},"pipelinechannel--text":{"componentInputParameter":"text"}}},"taskInfo":{"name":"condition-1"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-coin-op-Output''] == ''heads''"}}' @@ -391,8 +455,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-1-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-1-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-1-driver.outputs.parameters.condition}}' depends: condition-1-driver.Succeeded @@ -409,8 +473,8 @@ spec: value: 
'{{workflow.parameters.implementations-663f234c274873c6b1c76e2f91561ee25169ff505cfe45310e51e4b0265a2d98}}' - name: task-name value: flip-coin-op - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: flip-coin-op-driver template: system-container-driver - arguments: @@ -433,8 +497,8 @@ spec: value: '{{workflow.parameters.implementations-0a97e2e381526d8572c8a0a776550e66305543ba3a2032eb60de0ef3e42504b5}}' - name: task-name value: print-op - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: flip-coin-op.Succeeded name: print-op-driver template: system-container-driver @@ -450,7 +514,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -468,8 +532,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/pipeline_with_condition_dynamic_task_output_custom_training_job.yaml b/test_data/compiled-workflows/pipeline_with_condition_dynamic_task_output_custom_training_job.yaml index 45f4a718c24..737bd900a63 100644 --- a/test_data/compiled-workflows/pipeline_with_condition_dynamic_task_output_custom_training_job.yaml +++ b/test_data/compiled-workflows/pipeline_with_condition_dynamic_task_output_custom_training_job.yaml @@ -157,8 +157,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -185,6 +185,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -194,13 +207,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -222,6 +239,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -251,6 +276,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -261,6 +290,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -300,6 +332,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + 
audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -324,8 +363,8 @@ spec: value: '{{workflow.parameters.implementations-20246c97a11cfe4c823ed6719ceb5c3f1d11943e4553ba99a2121c27711478b5}}' - name: task-name value: custom-training-job - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: custom-training-job-driver template: system-container-driver - arguments: @@ -340,7 +379,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-1 outputs: {} @@ -356,8 +395,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -368,8 +407,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -382,6 +421,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -391,6 +443,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -400,8 +456,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -410,9 +466,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -421,6 +477,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -433,8 +497,8 @@ spec: value: '{{workflow.parameters.implementations-b644111ca20a6ae3d9e716f9eda564a01b648ac879e153b674558d9c9a9da6e8}}' - name: task-name value: accelerator-count - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: accelerator-count-driver template: system-container-driver - arguments: @@ -457,8 +521,8 @@ spec: value: '{{workflow.parameters.implementations-f9a25b0b0eb8f4240e030d4007d8ba04dd922f7ac266816c8751d15ad42c5236}}' - name: task-name value: accelerator-type - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: accelerator-type-driver template: system-container-driver - arguments: @@ -475,8 +539,8 @@ spec: parameters: - name: component value: 
'{{workflow.parameters.components-comp-condition-1}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-1"},"dependentTasks":["accelerator-count","accelerator-type","flip-biased-coin-op","machine-type"],"inputs":{"parameters":{"pipelinechannel--accelerator-count-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"accelerator-count"}},"pipelinechannel--accelerator-type-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"accelerator-type"}},"pipelinechannel--encryption_spec_key_name":{"componentInputParameter":"encryption_spec_key_name"},"pipelinechannel--flip-biased-coin-op-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-biased-coin-op"}},"pipelinechannel--location":{"componentInputParameter":"location"},"pipelinechannel--machine-type-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"machine-type"}},"pipelinechannel--project":{"componentInputParameter":"project"}}},"taskInfo":{"name":"condition-1"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-biased-coin-op-Output''] == ''heads''"}}' @@ -488,8 +552,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-1-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-1-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-1-driver.outputs.parameters.condition}}' depends: condition-1-driver.Succeeded @@ -506,8 +570,8 @@ spec: value: '{{workflow.parameters.implementations-0c259210b1fb84dd1577b42a94ae79f0619227d7d05e5f54ffe040c596a5439c}}' - name: task-name value: flip-biased-coin-op - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: flip-biased-coin-op-driver template: system-container-driver - arguments: @@ -530,8 +594,8 @@ spec: value: '{{workflow.parameters.implementations-a8764ade5a7c2d0a4863791e694b80370ee266ca37f2aa429d49e94e203e1715}}' - name: task-name value: machine-type - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: machine-type-driver template: system-container-driver - arguments: @@ -546,7 +610,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -564,8 +628,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/pipeline_with_dynamic_importer_metadata.yaml b/test_data/compiled-workflows/pipeline_with_dynamic_importer_metadata.yaml index 5cf44ee7829..f1dbbc0d87c 100644 --- a/test_data/compiled-workflows/pipeline_with_dynamic_importer_metadata.yaml +++ b/test_data/compiled-workflows/pipeline_with_dynamic_importer_metadata.yaml @@ -41,26 +41,20 @@ spec: args: - --executor_type - importer - - --task_spec - - '{{inputs.parameters.task}}' - - --component_spec - - 
'{{inputs.parameters.component}}' + - --task_name + - '{{inputs.parameters.task-name}}' - --importer_spec - '{{inputs.parameters.importer}}' - --pipeline_name - pipeline-with-importer - --run_id - '{{workflow.uid}}' - - --parent_dag_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --pod_name - $(KFP_POD_NAME) - --pod_uid - $(KFP_POD_UID) - - --mlmd_server_address - - $(METADATA_GRPC_SERVICE_HOST) - - --mlmd_server_port - - $(METADATA_GRPC_SERVICE_PORT) command: - launcher-v2 env: @@ -72,6 +66,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -85,15 +83,26 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - - name: task - - name: component + - name: task-name - name: importer - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: system-importer outputs: {} + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - container: args: - --type @@ -106,8 +115,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -134,6 +143,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -143,13 +165,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -171,6 +197,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -200,6 +234,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -210,6 +248,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -249,6 +290,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -265,26 +313,22 @@ spec: tasks: - arguments: parameters: - - name: task - value: 
'{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-importer"},"inputs":{"parameters":{"metadata":{"componentInputParameter":"name"},"metadata-2":{"componentInputParameter":"pipeline_input_image_uri"},"uri":{"componentInputParameter":"pipeline_input_artifact_uri"}}},"taskInfo":{"name":"importer"}}' - - name: component - value: '{{workflow.parameters.components-comp-importer}}' + - name: task-name + value: importer - name: importer value: '{{workflow.parameters.implementations-comp-importer}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: importer template: system-importer - arguments: parameters: - - name: task - value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-importer-2"},"dependentTasks":["make-name"],"inputs":{"parameters":{"metadata":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"make-name"}},"metadata-2":{"componentInputParameter":"name"},"metadata-3":{"componentInputParameter":"int_input"},"uri":{"runtimeValue":{"constant":"gs://ml-pipeline-playground/shakespeare1.txt"}}}},"taskInfo":{"name":"importer-2"}}' - - name: component - value: '{{workflow.parameters.components-comp-importer-2}}' + - name: task-name + value: importer-2 - name: importer value: '{{workflow.parameters.implementations-comp-importer-2}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: importer-2 template: system-importer - arguments: @@ -297,8 +341,8 @@ spec: value: '{{workflow.parameters.implementations-1f5c6ec51212707117cf959e66eed87794baf9ac92c3f57c43a8e3ce06397f03}}' - name: task-name value: make-name - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: make-name-driver template: system-container-driver - arguments: @@ -313,7 +357,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -329,8 +373,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -341,8 +385,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -355,6 +399,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -364,6 +421,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -373,8 +434,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: 
iteration-index - default: DAG @@ -383,9 +444,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -394,6 +455,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -408,8 +477,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/pipeline_with_dynamic_task_output_custom_training_job.yaml b/test_data/compiled-workflows/pipeline_with_dynamic_task_output_custom_training_job.yaml index ed2d187edec..fb1052ad11e 100644 --- a/test_data/compiled-workflows/pipeline_with_dynamic_task_output_custom_training_job.yaml +++ b/test_data/compiled-workflows/pipeline_with_dynamic_task_output_custom_training_job.yaml @@ -119,8 +119,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -147,6 +147,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -156,13 +169,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -184,6 +201,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -213,6 +238,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -223,6 +252,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -262,6 +294,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -286,8 +325,8 @@ spec: value: '{{workflow.parameters.implementations-b644111ca20a6ae3d9e716f9eda564a01b648ac879e153b674558d9c9a9da6e8}}' - name: task-name value: accelerator-count - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: 
'{{inputs.parameters.parent-dag-task-id}}' name: accelerator-count-driver template: system-container-driver - arguments: @@ -310,8 +349,8 @@ spec: value: '{{workflow.parameters.implementations-f9a25b0b0eb8f4240e030d4007d8ba04dd922f7ac266816c8751d15ad42c5236}}' - name: task-name value: accelerator-type - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: accelerator-type-driver template: system-container-driver - arguments: @@ -334,8 +373,8 @@ spec: value: '{{workflow.parameters.implementations-52a7e68811aea0ab261ad8816e36b43d20ed63e304bf491e77d3a1ac7b6e5871}}' - name: task-name value: custom-training-job - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: accelerator-count.Succeeded && accelerator-type.Succeeded && machine-type.Succeeded name: custom-training-job-driver template: system-container-driver @@ -359,8 +398,8 @@ spec: value: '{{workflow.parameters.implementations-a8764ade5a7c2d0a4863791e694b80370ee266ca37f2aa429d49e94e203e1715}}' - name: task-name value: machine-type - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: machine-type-driver template: system-container-driver - arguments: @@ -375,7 +414,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -391,8 +430,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -403,8 +442,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -417,6 +456,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -426,6 +478,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -435,8 +491,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -445,9 +501,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -456,6 +512,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -470,8 +534,8 @@ spec: template: 
system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/pipeline_with_env.yaml b/test_data/compiled-workflows/pipeline_with_env.yaml index 0d7f8083e6b..0d4da744d70 100644 --- a/test_data/compiled-workflows/pipeline_with_env.yaml +++ b/test_data/compiled-workflows/pipeline_with_env.yaml @@ -47,8 +47,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -75,6 +75,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -84,13 +97,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -112,6 +129,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -141,6 +166,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -151,6 +180,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -190,6 +222,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -214,8 +253,8 @@ spec: value: '{{workflow.parameters.implementations-507ec52d0dba96e88845f3ca614b4262d4b5b44fa204a91cd13cb60c8a78d4fe}}' - name: task-name value: print-env - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-env-driver template: system-container-driver - arguments: @@ -238,8 +277,8 @@ spec: value: '{{workflow.parameters.implementations-dba9846467b474d60f2e0c7f2251f75735fc65a98ecebf7ffcedbd015595be80}}' - name: task-name value: print-env-op - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-env-op-driver template: system-container-driver - arguments: @@ -254,7 +293,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -270,8 +309,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - 
--dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -282,8 +321,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -296,6 +335,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -305,6 +357,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -314,8 +370,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -324,9 +380,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -335,6 +391,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -349,8 +413,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/pipeline_with_exit_handler.yaml b/test_data/compiled-workflows/pipeline_with_exit_handler.yaml index 527cb00d03b..1b088cdb572 100644 --- a/test_data/compiled-workflows/pipeline_with_exit_handler.yaml +++ b/test_data/compiled-workflows/pipeline_with_exit_handler.yaml @@ -59,8 +59,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -87,6 +87,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -96,13 +109,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -124,6 +141,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + 
projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -153,6 +178,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -163,6 +192,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -202,6 +234,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -227,8 +266,8 @@ spec: value: '{{workflow.parameters.implementations-0e20296d1c94dd957c12f688261536d608c0a9430b71b2c67c664244a11161e8}}' - name: task-name value: fail-op - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: fail-op-driver template: system-container-driver - arguments: @@ -251,8 +290,8 @@ spec: value: '{{workflow.parameters.implementations-af17df745243f885d9854f1931988534c66b26d6c60c5279e354d6b3480dceb8}}' - name: task-name value: print-op-2 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-op-2-driver template: system-container-driver - arguments: @@ -267,7 +306,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-exit-handler-1 outputs: {} @@ -284,8 +323,8 @@ spec: value: '{{workflow.parameters.implementations-af17df745243f885d9854f1931988534c66b26d6c60c5279e354d6b3480dceb8}}' - name: task-name value: print-op - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-op-driver template: system-container-driver - arguments: @@ -300,7 +339,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: exit-hook-root-print-op outputs: {} @@ -316,8 +355,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -328,8 +367,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -342,6 +381,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -351,6 +403,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true 
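Every driver and launcher container in these regenerated workflows gains the same two additions: downward-API env vars (KFP_POD_NAME, KFP_POD_UID, NAMESPACE) and an audience-scoped service-account token projected read-only at /var/run/secrets/kfp, replacing the removed --mlmd_server_address/--mlmd_server_port plumbing. A minimal standalone Pod sketch isolating that pattern (the pod name is hypothetical; every other field value is taken from these hunks):

apiVersion: v1
kind: Pod
metadata:
  name: kfp-token-demo        # hypothetical name, for illustration only
spec:
  containers:
    - name: demo
      image: ghcr.io/kubeflow/kfp-driver:latest
      env:
        # Downward API: expose the pod's own identity to the process,
        # mirroring the KFP_POD_NAME / KFP_POD_UID / NAMESPACE entries
        # added to each driver and launcher container above.
        - name: KFP_POD_NAME
          valueFrom:
            fieldRef:
              fieldPath: metadata.name
        - name: KFP_POD_UID
          valueFrom:
            fieldRef:
              fieldPath: metadata.uid
        - name: NAMESPACE
          valueFrom:
            fieldRef:
              fieldPath: metadata.namespace
      volumeMounts:
        # The projected token lands at /var/run/secrets/kfp/token.
        - mountPath: /var/run/secrets/kfp
          name: kfp-launcher-token
          readOnly: true
  volumes:
    # Projected service-account token, audience-scoped and short-lived.
    - name: kfp-launcher-token
      projected:
        sources:
          - serviceAccountToken:
              audience: pipelines.kubeflow.org
              expirationSeconds: 7200
              path: token

Because the token carries the pipelines.kubeflow.org audience, only a consumer that validates that audience (presumably the KFP backend) will accept it, unlike the unscoped legacy token under /var/run/secrets/kubernetes.io/serviceaccount; the kubelet re-projects the file before the 7200-second expiry, so the container should re-read it rather than cache the token.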
inputs: parameters: - name: component @@ -360,8 +416,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -370,9 +426,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -381,14 +437,22 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: parameters: - name: component value: '{{workflow.parameters.components-comp-exit-handler-1}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-exit-handler-1"},"inputs":{"parameters":{"pipelinechannel--message":{"componentInputParameter":"message"}}},"taskInfo":{"name":"exit-handler-1"}}' - name: task-name @@ -397,8 +461,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.exit-handler-1-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.exit-handler-1-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.exit-handler-1-driver.outputs.parameters.condition}}' depends: exit-handler-1-driver.Succeeded @@ -406,14 +470,14 @@ spec: exit: arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' template: exit-hook-root-print-op name: exit-handler-1 template: comp-exit-handler-1 inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -431,8 +495,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/pipeline_with_google_artifact_type.yaml b/test_data/compiled-workflows/pipeline_with_google_artifact_type.yaml index bed4515a363..3c8604969d6 100644 --- a/test_data/compiled-workflows/pipeline_with_google_artifact_type.yaml +++ b/test_data/compiled-workflows/pipeline_with_google_artifact_type.yaml @@ -59,26 +59,20 @@ spec: args: - --executor_type - importer - - --task_spec - - '{{inputs.parameters.task}}' - - --component_spec - - '{{inputs.parameters.component}}' + - --task_name + - '{{inputs.parameters.task-name}}' - --importer_spec - '{{inputs.parameters.importer}}' - --pipeline_name - pipeline-with-google-types - --run_id - '{{workflow.uid}}' - - --parent_dag_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --pod_name - $(KFP_POD_NAME) - --pod_uid - $(KFP_POD_UID) - - --mlmd_server_address - - $(METADATA_GRPC_SERVICE_HOST) - - --mlmd_server_port - - $(METADATA_GRPC_SERVICE_PORT) command: - launcher-v2 env: @@ -90,6 +84,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: 
metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -103,15 +101,26 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - - name: task - - name: component + - name: task-name - name: importer - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: system-importer outputs: {} + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - container: args: - --type @@ -124,8 +133,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -152,6 +161,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -161,13 +183,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -189,6 +215,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -218,6 +252,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -228,6 +266,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -267,6 +308,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -283,14 +331,12 @@ spec: tasks: - arguments: parameters: - - name: task - value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-importer"},"inputs":{"parameters":{"uri":{"runtimeValue":{"constant":"gs://ml-pipeline-playground/shakespeare1.txt"}}}},"taskInfo":{"name":"importer"}}' - - name: component - value: '{{workflow.parameters.components-comp-importer}}' + - name: task-name + value: importer - name: importer value: '{{workflow.parameters.implementations-comp-importer}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: importer template: system-importer - arguments: @@ -303,8 +349,8 @@ spec: value: '{{workflow.parameters.implementations-d95089443b1478c8651b4c7907aff4ad91082dd878a05b164608a7958a55d36e}}' - name: task-name value: model-consumer - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: 
'{{inputs.parameters.parent-dag-task-id}}' depends: importer.Succeeded && model-producer.Succeeded name: model-consumer-driver template: system-container-driver @@ -328,8 +374,8 @@ spec: value: '{{workflow.parameters.implementations-e9925d1cd495bcfa47d9908820b0f2cbd7304b8cd8e49bc581e7ec0d2e8b0d94}}' - name: task-name value: model-producer - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: model-producer-driver template: system-container-driver - arguments: @@ -344,7 +390,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -360,8 +406,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -372,8 +418,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -386,6 +432,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -395,6 +454,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -404,8 +467,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -414,9 +477,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -425,6 +488,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -439,8 +510,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/pipeline_with_importer.yaml b/test_data/compiled-workflows/pipeline_with_importer.yaml index 4335f8d8827..bf774ff743a 100644 --- a/test_data/compiled-workflows/pipeline_with_importer.yaml +++ b/test_data/compiled-workflows/pipeline_with_importer.yaml @@ -49,26 +49,20 @@ spec: args: - --executor_type - importer - - --task_spec - - '{{inputs.parameters.task}}' - - --component_spec - - '{{inputs.parameters.component}}' + - --task_name + - '{{inputs.parameters.task-name}}' - --importer_spec - '{{inputs.parameters.importer}}' - 
--pipeline_name - pipeline-with-importer - --run_id - '{{workflow.uid}}' - - --parent_dag_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --pod_name - $(KFP_POD_NAME) - --pod_uid - $(KFP_POD_UID) - - --mlmd_server_address - - $(METADATA_GRPC_SERVICE_HOST) - - --mlmd_server_port - - $(METADATA_GRPC_SERVICE_PORT) command: - launcher-v2 env: @@ -80,6 +74,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -93,15 +91,26 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - - name: task - - name: component + - name: task-name - name: importer - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: system-importer outputs: {} + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - container: args: - --type @@ -114,8 +123,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -142,6 +151,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -151,13 +173,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -179,6 +205,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -208,6 +242,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -218,6 +256,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -257,6 +298,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -273,14 +321,12 @@ spec: tasks: - arguments: parameters: - - name: task - value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-importer-2"},"inputs":{"parameters":{"uri":{"componentInputParameter":"pipelinechannel--dataset2"}}},"taskInfo":{"name":"importer-2"}}' - - name: component - value: '{{workflow.parameters.components-comp-importer-2}}' + - name: task-name + value: importer-2 - name: importer value: 
'{{workflow.parameters.implementations-comp-importer-2}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: importer-2 template: system-importer - arguments: @@ -293,8 +339,8 @@ spec: value: '{{workflow.parameters.implementations-3fe46efe149977fb8647a792ff4ac104e78f0aacef1094c802f8cdaf3f3034b0}}' - name: task-name value: train-2 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: importer-2.Succeeded name: train-2-driver template: system-container-driver @@ -310,7 +356,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-1 outputs: {} @@ -326,8 +372,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -338,8 +384,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -352,6 +398,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -361,6 +420,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -370,8 +433,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -380,9 +443,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -391,14 +454,22 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-1}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-1"},"dependentTasks":["train"],"inputs":{"parameters":{"pipelinechannel--dataset2":{"componentInputParameter":"dataset2"},"pipelinechannel--train-scalar":{"taskOutputParameter":{"outputParameterKey":"scalar","producerTask":"train"}}}},"taskInfo":{"name":"condition-1"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--train-scalar''] == ''123''"}}' @@ -409,8 +480,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: 
'{{tasks.condition-1-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-1-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-1-driver.outputs.parameters.condition}}' depends: condition-1-driver.Succeeded @@ -419,14 +490,12 @@ spec: when: '{{tasks.condition-1-driver.outputs.parameters.condition}} != false' - arguments: parameters: - - name: task - value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-importer"},"inputs":{"parameters":{"uri":{"runtimeValue":{"constant":"gs://ml-pipeline-playground/shakespeare1.txt"}}}},"taskInfo":{"name":"importer"}}' - - name: component - value: '{{workflow.parameters.components-comp-importer}}' + - name: task-name + value: importer - name: importer value: '{{workflow.parameters.implementations-comp-importer}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: importer template: system-importer - arguments: @@ -439,8 +508,8 @@ spec: value: '{{workflow.parameters.implementations-3fe46efe149977fb8647a792ff4ac104e78f0aacef1094c802f8cdaf3f3034b0}}' - name: task-name value: train - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: importer.Succeeded name: train-driver template: system-container-driver @@ -456,7 +525,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -474,8 +543,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/pipeline_with_importer_and_gcpc_types.yaml b/test_data/compiled-workflows/pipeline_with_importer_and_gcpc_types.yaml index 1775248e6d1..4042062e2aa 100644 --- a/test_data/compiled-workflows/pipeline_with_importer_and_gcpc_types.yaml +++ b/test_data/compiled-workflows/pipeline_with_importer_and_gcpc_types.yaml @@ -36,8 +36,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -64,6 +64,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -73,13 +86,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -101,6 +118,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - 
arguments: @@ -130,6 +155,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -140,6 +169,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -179,6 +211,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -195,26 +234,20 @@ spec: args: - --executor_type - importer - - --task_spec - - '{{inputs.parameters.task}}' - - --component_spec - - '{{inputs.parameters.component}}' + - --task_name + - '{{inputs.parameters.task-name}}' - --importer_spec - '{{inputs.parameters.importer}}' - --pipeline_name - pipeline-with-importer-and-gcpc-type - --run_id - '{{workflow.uid}}' - - --parent_dag_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --pod_name - $(KFP_POD_NAME) - --pod_uid - $(KFP_POD_UID) - - --mlmd_server_address - - $(METADATA_GRPC_SERVICE_HOST) - - --mlmd_server_port - - $(METADATA_GRPC_SERVICE_PORT) command: - launcher-v2 env: @@ -226,6 +259,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -239,15 +276,26 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - - name: task - - name: component + - name: task-name - name: importer - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: system-importer outputs: {} + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -260,8 +308,8 @@ spec: value: '{{workflow.parameters.implementations-9422f7f39ffcd22b988c9b7896b4fe8a0944c019f8cfb5ddd8f1b1ef8f5dbe18}}' - name: task-name value: consumer-op - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: importer.Succeeded name: consumer-op-driver template: system-container-driver @@ -277,19 +325,17 @@ spec: template: system-container-executor - arguments: parameters: - - name: task - value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-importer"},"inputs":{"parameters":{"uri":{"runtimeValue":{"constant":"gs://ml-pipeline-playground/shakespeare1.txt"}}}},"taskInfo":{"name":"importer"}}' - - name: component - value: '{{workflow.parameters.components-comp-importer}}' + - name: task-name + value: importer - name: importer value: '{{workflow.parameters.implementations-comp-importer}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: importer template: system-importer inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -305,8 +351,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - 
--parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -317,8 +363,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -331,6 +377,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -340,6 +399,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -349,8 +412,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -359,9 +422,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -370,6 +433,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -384,8 +455,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/pipeline_with_input_status_state.yaml b/test_data/compiled-workflows/pipeline_with_input_status_state.yaml index 220e1b24679..876570f67a1 100644 --- a/test_data/compiled-workflows/pipeline_with_input_status_state.yaml +++ b/test_data/compiled-workflows/pipeline_with_input_status_state.yaml @@ -59,8 +59,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -87,6 +87,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -96,13 +109,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -124,6 +141,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: 
+ audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -153,6 +178,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -163,6 +192,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -202,6 +234,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -226,8 +265,8 @@ spec: value: '{{workflow.parameters.implementations-b7759bec7b7c73abcfcc5fe174f2f6965ece187d7e53beabe3e93fc81431ba91}}' - name: task-name value: some-task - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: some-task-driver template: system-container-driver - arguments: @@ -242,7 +281,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-exit-handler-1 outputs: {} @@ -258,8 +297,8 @@ spec: value: '{{workflow.parameters.implementations-6f22693acb9f2a815b4823044bf0a93f4c912566dd68e1d8ca2a680ac71b67d3}}' - name: task-name value: echo-state - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: echo-state-driver template: system-container-driver - arguments: @@ -274,7 +313,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: exit-hook-root-echo-state outputs: {} @@ -290,8 +329,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -302,8 +341,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -316,6 +355,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -325,6 +377,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -334,8 +390,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -344,9 +400,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id 
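# The driver and launcher templates in these hunks gain a kfp-launcher-token volume
# mounted read-only at /var/run/secrets/kfp. A minimal, self-contained sketch of the
# same Kubernetes projected service-account-token pattern, assuming a standalone pod
# (the pod name and busybox image here are illustrative, not from the diff):
#
# apiVersion: v1
# kind: Pod
# metadata:
#   name: projected-token-demo
# spec:
#   containers:
#     - name: main
#       image: busybox
#       # Print the mounted token once, then keep the pod alive for inspection.
#       command: ["sh", "-c", "cat /var/run/secrets/kfp/token && sleep 3600"]
#       volumeMounts:
#         - mountPath: /var/run/secrets/kfp
#           name: kfp-launcher-token
#           readOnly: true
#   volumes:
#     - name: kfp-launcher-token
#       projected:
#         sources:
#           - serviceAccountToken:
#               audience: pipelines.kubeflow.org  # token is rejected for other audiences
#               expirationSeconds: 7200           # kubelet refreshes it before expiry
#               path: token                       # file name under the mount path
#
# Unlike a long-lived Secret-based token, a projected serviceAccountToken is bound to
# the pod's lifetime, scoped to a single audience, and rotated automatically by the
# kubelet, which is presumably why the compiled workflows mount one per template.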
- name: iteration-count valueFrom: default: "0" @@ -355,14 +411,22 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: parameters: - name: component value: '{{workflow.parameters.components-comp-exit-handler-1}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-exit-handler-1"},"taskInfo":{"name":"exit-handler-1"}}' - name: task-name @@ -371,8 +435,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.exit-handler-1-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.exit-handler-1-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.exit-handler-1-driver.outputs.parameters.condition}}' depends: exit-handler-1-driver.Succeeded @@ -380,14 +444,14 @@ spec: exit: arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' template: exit-hook-root-echo-state name: exit-handler-1 template: comp-exit-handler-1 inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -405,8 +469,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/pipeline_with_loops.yaml b/test_data/compiled-workflows/pipeline_with_loops.yaml index 0d0ac3a1b1a..b9a8b50bcc9 100644 --- a/test_data/compiled-workflows/pipeline_with_loops.yaml +++ b/test_data/compiled-workflows/pipeline_with_loops.yaml @@ -75,8 +75,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -103,6 +103,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -112,13 +125,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -140,6 +157,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -169,6 +194,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: 
metadata-grpc-configmap @@ -179,6 +208,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -218,6 +250,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -242,8 +281,10 @@ spec: value: '{{workflow.parameters.implementations-a37ae37968ac30a4c2376035e115521a351c56fae22ef37e91f7df600a415f7d}}' - name: task-name value: print-text - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: print-text-driver template: system-container-driver - arguments: @@ -253,12 +294,15 @@ spec: - default: "false" name: cached-decision value: '{{tasks.print-text-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: print-text-driver.Succeeded name: print-text template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id + - name: iteration-index metadata: {} name: comp-for-loop-1 outputs: {} @@ -274,8 +318,10 @@ spec: value: '{{workflow.parameters.implementations-5d3bbc654be05c6e0386e82c98f120e616a0fc39246372d5e8cd1d973990c50f}}' - name: task-name value: print-struct - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: print-struct-driver template: system-container-driver - arguments: @@ -285,6 +331,8 @@ spec: - default: "false" name: cached-decision value: '{{tasks.print-struct-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: print-struct-driver.Succeeded name: print-struct template: system-container-executor @@ -298,8 +346,10 @@ spec: value: '{{workflow.parameters.implementations-a37ae37968ac30a4c2376035e115521a351c56fae22ef37e91f7df600a415f7d}}' - name: task-name value: print-text-2 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: print-text-2-driver template: system-container-driver - arguments: @@ -309,6 +359,8 @@ spec: - default: "false" name: cached-decision value: '{{tasks.print-text-2-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: print-text-2-driver.Succeeded name: print-text-2 template: system-container-executor @@ -322,8 +374,10 @@ spec: value: '{{workflow.parameters.implementations-a37ae37968ac30a4c2376035e115521a351c56fae22ef37e91f7df600a415f7d}}' - name: task-name value: print-text-3 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: print-text-3-driver template: system-container-driver - arguments: @@ -333,12 +387,15 @@ spec: - default: "false" name: 
cached-decision value: '{{tasks.print-text-3-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: print-text-3-driver.Succeeded name: print-text-3 template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id + - name: iteration-index metadata: {} name: comp-for-loop-2 outputs: {} @@ -354,8 +411,10 @@ spec: value: '{{workflow.parameters.implementations-5d3bbc654be05c6e0386e82c98f120e616a0fc39246372d5e8cd1d973990c50f}}' - name: task-name value: print-struct-2 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: print-struct-2-driver template: system-container-driver - arguments: @@ -365,6 +424,8 @@ spec: - default: "false" name: cached-decision value: '{{tasks.print-struct-2-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: print-struct-2-driver.Succeeded name: print-struct-2 template: system-container-executor @@ -378,8 +439,10 @@ spec: value: '{{workflow.parameters.implementations-a37ae37968ac30a4c2376035e115521a351c56fae22ef37e91f7df600a415f7d}}' - name: task-name value: print-text-4 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: print-text-4-driver template: system-container-driver - arguments: @@ -389,6 +452,8 @@ spec: - default: "false" name: cached-decision value: '{{tasks.print-text-4-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: print-text-4-driver.Succeeded name: print-text-4 template: system-container-executor @@ -402,8 +467,10 @@ spec: value: '{{workflow.parameters.implementations-a37ae37968ac30a4c2376035e115521a351c56fae22ef37e91f7df600a415f7d}}' - name: task-name value: print-text-5 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: print-text-5-driver template: system-container-driver - arguments: @@ -413,12 +480,15 @@ spec: - default: "false" name: cached-decision value: '{{tasks.print-text-5-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: print-text-5-driver.Succeeded name: print-text-5 template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id + - name: iteration-index metadata: {} name: comp-for-loop-4 outputs: {} @@ -434,8 +504,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -446,8 +516,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - 
--condition_path @@ -460,6 +530,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -469,6 +552,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -478,8 +565,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -488,9 +575,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -499,62 +586,42 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-1}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-1"},"inputs":{"parameters":{"pipelinechannel--loop_parameter":{"componentInputParameter":"loop_parameter"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop_parameter-loop-item","items":{"inputParameter":"pipelinechannel--loop_parameter"}},"taskInfo":{"name":"for-loop-1"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-1 - inputs: - parameters: - - name: parent-dag-id - - name: iteration-index - metadata: {} - name: comp-for-loop-1-iteration - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-1}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-for-loop-1"},"inputs":{"parameters":{"pipelinechannel--loop_parameter":{"componentInputParameter":"loop_parameter"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop_parameter-loop-item","items":{"inputParameter":"pipelinechannel--loop_parameter"}},"taskInfo":{"name":"for-loop-1"}}' + - name: task-name + value: for-loop-1 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-1-iteration + template: comp-for-loop-1 withSequence: count: 
'{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-1-for-loop-1-iterator outputs: {} @@ -564,56 +631,28 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-2}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-2"},"dependentTasks":["args-generator-op"],"inputs":{"parameters":{"pipelinechannel--args-generator-op-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"args-generator-op"}}}},"parameterIterator":{"itemInput":"pipelinechannel--args-generator-op-Output-loop-item","items":{"inputParameter":"pipelinechannel--args-generator-op-Output"}},"taskInfo":{"name":"for-loop-2"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-2 - inputs: - parameters: - - name: parent-dag-id - - name: iteration-index - metadata: {} - name: comp-for-loop-2-iteration - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-2}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-for-loop-2"},"dependentTasks":["args-generator-op"],"inputs":{"parameters":{"pipelinechannel--args-generator-op-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"args-generator-op"}}}},"parameterIterator":{"itemInput":"pipelinechannel--args-generator-op-Output-loop-item","items":{"inputParameter":"pipelinechannel--args-generator-op-Output"}},"taskInfo":{"name":"for-loop-2"}}' + - name: task-name + value: for-loop-2 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-2-iteration + template: comp-for-loop-2 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-2-for-loop-2-iterator outputs: {} @@ -623,58 +662,29 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-4}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-4"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[{\"A_a\": - \"1\", \"B_b\": \"2\"}, {\"A_a\": \"10\", \"B_b\": \"20\"}]"}},"taskInfo":{"name":"for-loop-4"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: 
'{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-4 - inputs: - parameters: - - name: parent-dag-id - - name: iteration-index - metadata: {} - name: comp-for-loop-4-iteration - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-4}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-for-loop-4"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[{\"A_a\": \"1\", \"B_b\": \"2\"}, {\"A_a\": \"10\", \"B_b\": \"20\"}]"}},"taskInfo":{"name":"for-loop-4"}}' + - name: task-name + value: for-loop-4 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-4-iteration + template: comp-for-loop-4 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-4-for-loop-4-iterator outputs: {} @@ -690,8 +700,8 @@ spec: value: '{{workflow.parameters.implementations-687d86676223e8ed6d5a94779ca21c78d7c45d0c14796f2992c0b78dc949e530}}' - name: task-name value: args-generator-op - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: args-generator-op-driver template: system-container-driver - arguments: @@ -706,26 +716,26 @@ spec: template: system-container-executor - arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: for-loop-1 template: comp-for-loop-1-for-loop-1-iterator - arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: args-generator-op.Succeeded name: for-loop-2 template: comp-for-loop-2-for-loop-2-iterator - arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: for-loop-4 template: comp-for-loop-4-for-loop-4-iterator inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -743,8 +753,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/pipeline_with_loops_and_conditions.yaml b/test_data/compiled-workflows/pipeline_with_loops_and_conditions.yaml index 3c5c55befe6..f2ea78f195b 100644 --- 
a/test_data/compiled-workflows/pipeline_with_loops_and_conditions.yaml +++ b/test_data/compiled-workflows/pipeline_with_loops_and_conditions.yaml @@ -121,8 +121,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -149,6 +149,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -158,13 +171,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -186,6 +203,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -215,6 +240,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -225,6 +254,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -264,6 +296,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -288,8 +327,8 @@ spec: value: '{{workflow.parameters.implementations-32f06a188ecaaebfd19502d5193eca7e8ceab20f38ca2c202c113df604e90a52}}' - name: task-name value: print-text-2 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-text-2-driver template: system-container-driver - arguments: @@ -304,7 +343,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-3 outputs: {} @@ -320,8 +359,8 @@ spec: value: '{{workflow.parameters.implementations-32f06a188ecaaebfd19502d5193eca7e8ceab20f38ca2c202c113df604e90a52}}' - name: task-name value: print-text-3 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-text-3-driver template: system-container-driver - arguments: @@ -336,7 +375,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-4 outputs: {} @@ -352,8 +391,10 @@ spec: value: '{{workflow.parameters.implementations-8853c56a797c7cf679a34e10437594f376f7496f7902f009aa8bb52f84544af7}}' - name: task-name value: print-struct - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: 
'{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: print-struct-driver template: system-container-driver - arguments: @@ -363,12 +404,15 @@ spec: - default: "false" name: cached-decision value: '{{tasks.print-struct-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: print-struct-driver.Succeeded name: print-struct template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id + - name: iteration-index metadata: {} name: comp-for-loop-7 outputs: {} @@ -384,8 +428,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -396,8 +440,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -410,6 +454,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -419,6 +476,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -428,8 +489,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -438,9 +499,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -449,64 +510,43 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-7}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-7"},"inputs":{"parameters":{"pipelinechannel--args-generator-op-Output-loop-item":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item"},"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-6","items":{"raw":"[{\"a\": - \"-1\"}, {\"a\": \"-2\"}]"}},"taskInfo":{"name":"for-loop-7"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: 
'{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-7 - inputs: - parameters: - - name: parent-dag-id - - name: iteration-index - metadata: {} - name: comp-for-loop-7-iteration - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-7}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-for-loop-7"},"inputs":{"parameters":{"pipelinechannel--args-generator-op-Output-loop-item":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item"},"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-6","items":{"raw":"[{\"a\": \"-1\"}, {\"a\": \"-2\"}]"}},"taskInfo":{"name":"for-loop-7"}}' + - name: task-name + value: for-loop-7 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-7-iteration + template: comp-for-loop-7 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-7-for-loop-7-iterator outputs: {} @@ -514,13 +554,13 @@ spec: tasks: - arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: for-loop-7 template: comp-for-loop-7-for-loop-7-iterator inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-5 outputs: {} @@ -536,8 +576,8 @@ spec: value: '{{workflow.parameters.implementations-32f06a188ecaaebfd19502d5193eca7e8ceab20f38ca2c202c113df604e90a52}}' - name: task-name value: print-text-8 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-text-8-driver template: system-container-driver - arguments: @@ -552,7 +592,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-13 outputs: {} @@ -562,21 +602,25 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-13}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-13"},"inputs":{"parameters":{"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"},"pipelinechannel--loop-item-param-11":{"componentInputParameter":"pipelinechannel--loop-item-param-11"}}},"taskInfo":{"name":"condition-13"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--loop-item-param-11''] == ''1''"}}' - name: task-name value: condition-13 + - name: iteration-index + value: 
'{{inputs.parameters.iteration-index}}' name: condition-13-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-13-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-13-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-13-driver.outputs.parameters.condition}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: condition-13-driver.Succeeded name: condition-13 template: comp-condition-13 @@ -591,8 +635,10 @@ spec: value: '{{workflow.parameters.implementations-32f06a188ecaaebfd19502d5193eca7e8ceab20f38ca2c202c113df604e90a52}}' - name: task-name value: print-text-7 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: print-text-7-driver template: system-container-driver - arguments: @@ -602,12 +648,15 @@ spec: - default: "false" name: cached-decision value: '{{tasks.print-text-7-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: print-text-7-driver.Succeeded name: print-text-7 template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id + - name: iteration-index metadata: {} name: comp-for-loop-12 outputs: {} @@ -623,8 +672,10 @@ spec: value: '{{workflow.parameters.implementations-32f06a188ecaaebfd19502d5193eca7e8ceab20f38ca2c202c113df604e90a52}}' - name: task-name value: print-text-4 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: print-text-4-driver template: system-container-driver - arguments: @@ -634,12 +685,15 @@ spec: - default: "false" name: cached-decision value: '{{tasks.print-text-4-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: print-text-4-driver.Succeeded name: print-text-4 template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id + - name: iteration-index metadata: {} name: comp-for-loop-8 outputs: {} @@ -655,8 +709,10 @@ spec: value: '{{workflow.parameters.implementations-32f06a188ecaaebfd19502d5193eca7e8ceab20f38ca2c202c113df604e90a52}}' - name: task-name value: print-text-6 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: print-text-6-driver template: system-container-driver - arguments: @@ -666,44 +722,17 @@ spec: - default: "false" name: cached-decision value: '{{tasks.print-text-6-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: print-text-6-driver.Succeeded name: print-text-6 template: system-container-executor inputs: parameters: - - name: parent-dag-id - metadata: {} - name: comp-for-loop-10 - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-10}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: 
parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-10"},"inputs":{"parameters":{"pipelinechannel--args-generator-op-2-Output":{"componentInputParameter":"pipelinechannel--args-generator-op-2-Output"},"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"},"pipelinechannel--loop_parameter-loop-item":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item"}}},"parameterIterator":{"itemInput":"pipelinechannel--args-generator-op-2-Output-loop-item","items":{"inputParameter":"pipelinechannel--args-generator-op-2-Output"}},"taskInfo":{"name":"for-loop-10"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-10 - inputs: - parameters: - - name: parent-dag-id + - name: parent-dag-task-id - name: iteration-index metadata: {} - name: comp-for-loop-10-iteration + name: comp-for-loop-10 outputs: {} - dag: tasks: @@ -711,26 +740,28 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-10}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-for-loop-10"},"inputs":{"parameters":{"pipelinechannel--args-generator-op-2-Output":{"componentInputParameter":"pipelinechannel--args-generator-op-2-Output"},"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"},"pipelinechannel--loop_parameter-loop-item":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item"}}},"parameterIterator":{"itemInput":"pipelinechannel--args-generator-op-2-Output-loop-item","items":{"inputParameter":"pipelinechannel--args-generator-op-2-Output"}},"taskInfo":{"name":"for-loop-10"}}' + - name: task-name + value: for-loop-10 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-10-iteration + template: comp-for-loop-10 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-10-for-loop-10-iterator outputs: {} @@ -738,8 +769,10 @@ spec: tasks: - arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: for-loop-10 template: comp-for-loop-10-for-loop-10-iterator - arguments: @@ -752,8 +785,10 @@ spec: value: '{{workflow.parameters.implementations-32f06a188ecaaebfd19502d5193eca7e8ceab20f38ca2c202c113df604e90a52}}' - name: task-name value: print-text-5 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: 
'{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: print-text-5-driver template: system-container-driver - arguments: @@ -763,45 +798,17 @@ spec: - default: "false" name: cached-decision value: '{{tasks.print-text-5-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: print-text-5-driver.Succeeded name: print-text-5 template: system-container-executor inputs: parameters: - - name: parent-dag-id - metadata: {} - name: comp-for-loop-9 - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-12}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-12"},"inputs":{"parameters":{"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-11","items":{"raw":"[\"1\", - \"2\"]"}},"taskInfo":{"name":"for-loop-12"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-12 - inputs: - parameters: - - name: parent-dag-id + - name: parent-dag-task-id - name: iteration-index metadata: {} - name: comp-for-loop-12-iteration + name: comp-for-loop-9 outputs: {} - dag: tasks: @@ -809,27 +816,29 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-12}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-for-loop-12"},"inputs":{"parameters":{"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-11","items":{"raw":"[\"1\", \"2\"]"}},"taskInfo":{"name":"for-loop-12"}}' + - name: task-name + value: for-loop-12 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-12-iteration + template: comp-for-loop-12 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-12-for-loop-12-iterator outputs: {} @@ -839,56 +848,28 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-8}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: 
'{"componentRef":{"name":"comp-for-loop-8"},"inputs":{"parameters":{"pipelinechannel--args-generator-op-Output-loop-item":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item"},"pipelinechannel--args-generator-op-Output-loop-item-subvar-B_b":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"B_b\"]"},"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"}}},"parameterIterator":{"itemInput":"pipelinechannel--args-generator-op-Output-loop-item-subvar-B_b-loop-item","items":{"inputParameter":"pipelinechannel--args-generator-op-Output-loop-item-subvar-B_b"}},"taskInfo":{"name":"for-loop-8"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-8 - inputs: - parameters: - - name: parent-dag-id - - name: iteration-index - metadata: {} - name: comp-for-loop-8-iteration - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-8}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-for-loop-8"},"inputs":{"parameters":{"pipelinechannel--args-generator-op-Output-loop-item":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item"},"pipelinechannel--args-generator-op-Output-loop-item-subvar-B_b":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"B_b\"]"},"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"}}},"parameterIterator":{"itemInput":"pipelinechannel--args-generator-op-Output-loop-item-subvar-B_b-loop-item","items":{"inputParameter":"pipelinechannel--args-generator-op-Output-loop-item-subvar-B_b"}},"taskInfo":{"name":"for-loop-8"}}' + - name: task-name + value: for-loop-8 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-8-iteration + template: comp-for-loop-8 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-8-for-loop-8-iterator outputs: {} @@ -898,56 +879,28 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-9}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: 
'{"componentRef":{"name":"comp-for-loop-9"},"inputs":{"parameters":{"pipelinechannel--args-generator-op-2-Output":{"componentInputParameter":"pipelinechannel--args-generator-op-2-Output"},"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"},"pipelinechannel--loop_parameter":{"componentInputParameter":"pipelinechannel--loop_parameter"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop_parameter-loop-item","items":{"inputParameter":"pipelinechannel--loop_parameter"}},"taskInfo":{"name":"for-loop-9"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-9 - inputs: - parameters: - - name: parent-dag-id - - name: iteration-index - metadata: {} - name: comp-for-loop-9-iteration - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-9}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-for-loop-9"},"inputs":{"parameters":{"pipelinechannel--args-generator-op-2-Output":{"componentInputParameter":"pipelinechannel--args-generator-op-2-Output"},"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"},"pipelinechannel--loop_parameter":{"componentInputParameter":"pipelinechannel--loop_parameter"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop_parameter-loop-item","items":{"inputParameter":"pipelinechannel--loop_parameter"}},"taskInfo":{"name":"for-loop-9"}}' + - name: task-name + value: for-loop-9 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-9-iteration + template: comp-for-loop-9 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-9-for-loop-9-iterator outputs: {} @@ -957,21 +910,25 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-3}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: 
'{"componentRef":{"name":"comp-condition-3"},"inputs":{"parameters":{"pipelinechannel--args-generator-op-Output-loop-item":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item"},"pipelinechannel--args-generator-op-Output-loop-item-subvar-A_a":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"A_a\"]"},"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"}}},"taskInfo":{"name":"condition-3"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--args-generator-op-Output-loop-item-subvar-A_a''] == ''heads''"}}' - name: task-name value: condition-3 + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: condition-3-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-3-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-3-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-3-driver.outputs.parameters.condition}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: condition-3-driver.Succeeded name: condition-3 template: comp-condition-3 @@ -980,21 +937,25 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-4}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-4"},"inputs":{"parameters":{"pipelinechannel--args-generator-op-Output-loop-item":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item"},"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"}}},"taskInfo":{"name":"condition-4"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-coin-op-Output''] == ''heads''"}}' - name: task-name value: condition-4 + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: condition-4-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-4-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-4-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-4-driver.outputs.parameters.condition}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: condition-4-driver.Succeeded name: condition-4 template: comp-condition-4 @@ -1003,41 +964,51 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-5}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: 
'{"componentRef":{"name":"comp-condition-5"},"inputs":{"parameters":{"pipelinechannel--args-generator-op-Output-loop-item":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item"},"pipelinechannel--args-generator-op-Output-loop-item-subvar-A_a":{"componentInputParameter":"pipelinechannel--args-generator-op-Output-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"A_a\"]"},"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"}}},"taskInfo":{"name":"condition-5"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--args-generator-op-Output-loop-item-subvar-A_a''] == ''tails''"}}' - name: task-name value: condition-5 + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: condition-5-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-5-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-5-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-5-driver.outputs.parameters.condition}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: condition-5-driver.Succeeded name: condition-5 template: comp-condition-5 when: '{{tasks.condition-5-driver.outputs.parameters.condition}} != false' - arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: for-loop-12 template: comp-for-loop-12-for-loop-12-iterator - arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: for-loop-8 template: comp-for-loop-8-for-loop-8-iterator - arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: for-loop-9 template: comp-for-loop-9-for-loop-9-iterator - arguments: @@ -1050,8 +1021,10 @@ spec: value: '{{workflow.parameters.implementations-32f06a188ecaaebfd19502d5193eca7e8ceab20f38ca2c202c113df604e90a52}}' - name: task-name value: print-text - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: print-text-driver template: system-container-driver - arguments: @@ -1061,44 +1034,17 @@ spec: - default: "false" name: cached-decision value: '{{tasks.print-text-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: print-text-driver.Succeeded name: print-text template: system-container-executor inputs: parameters: - - name: parent-dag-id - metadata: {} - name: comp-for-loop-2 - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-2}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - 
value: '{"componentRef":{"name":"comp-for-loop-2"},"dependentTasks":["args-generator-op-2"],"inputs":{"parameters":{"pipelinechannel--args-generator-op-2-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"args-generator-op-2"}},"pipelinechannel--args-generator-op-Output":{"componentInputParameter":"pipelinechannel--args-generator-op-Output"},"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"},"pipelinechannel--loop_parameter":{"componentInputParameter":"pipelinechannel--loop_parameter"},"pipelinechannel--msg":{"componentInputParameter":"pipelinechannel--msg"}}},"parameterIterator":{"itemInput":"pipelinechannel--args-generator-op-Output-loop-item","items":{"inputParameter":"pipelinechannel--args-generator-op-Output"}},"taskInfo":{"name":"for-loop-2"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-2 - inputs: - parameters: - - name: parent-dag-id + - name: parent-dag-task-id - name: iteration-index metadata: {} - name: comp-for-loop-2-iteration + name: comp-for-loop-2 outputs: {} - dag: tasks: @@ -1106,26 +1052,28 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-2}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-for-loop-2"},"dependentTasks":["args-generator-op-2"],"inputs":{"parameters":{"pipelinechannel--args-generator-op-2-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"args-generator-op-2"}},"pipelinechannel--args-generator-op-Output":{"componentInputParameter":"pipelinechannel--args-generator-op-Output"},"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"},"pipelinechannel--loop_parameter":{"componentInputParameter":"pipelinechannel--loop_parameter"},"pipelinechannel--msg":{"componentInputParameter":"pipelinechannel--msg"}}},"parameterIterator":{"itemInput":"pipelinechannel--args-generator-op-Output-loop-item","items":{"inputParameter":"pipelinechannel--args-generator-op-Output"}},"taskInfo":{"name":"for-loop-2"}}' + - name: task-name + value: for-loop-2 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-2-iteration + template: comp-for-loop-2 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-2-for-loop-2-iterator outputs: {} @@ -1141,8 +1089,8 @@ spec: value: '{{workflow.parameters.implementations-ac68354549bad0bc7c4df15cdbcb5ecaacdd64a74d077c031e7c4c8d91aab247}}' - name: task-name value: args-generator-op-2 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: 
'{{inputs.parameters.parent-dag-task-id}}' name: args-generator-op-2-driver template: system-container-driver - arguments: @@ -1157,14 +1105,14 @@ spec: template: system-container-executor - arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: args-generator-op-2.Succeeded name: for-loop-2 template: comp-for-loop-2-for-loop-2-iterator inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-1 outputs: {} @@ -1180,8 +1128,10 @@ spec: value: '{{workflow.parameters.implementations-32f06a188ecaaebfd19502d5193eca7e8ceab20f38ca2c202c113df604e90a52}}' - name: task-name value: print-text-9 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: print-text-9-driver template: system-container-driver - arguments: @@ -1191,44 +1141,17 @@ spec: - default: "false" name: cached-decision value: '{{tasks.print-text-9-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: print-text-9-driver.Succeeded name: print-text-9 template: system-container-executor inputs: parameters: - - name: parent-dag-id - metadata: {} - name: comp-for-loop-16 - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-16}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-16"},"inputs":{"parameters":{"pipelinechannel--loop_parameter-loop-item":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item"},"pipelinechannel--loop_parameter-loop-item-subvar-B_b":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"B_b\"]"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop_parameter-loop-item-subvar-B_b-loop-item","items":{"inputParameter":"pipelinechannel--loop_parameter-loop-item-subvar-B_b"}},"taskInfo":{"name":"for-loop-16"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-16 - inputs: - parameters: - - name: parent-dag-id + - name: parent-dag-task-id - name: iteration-index metadata: {} - name: comp-for-loop-16-iteration + name: comp-for-loop-16 outputs: {} - dag: tasks: @@ -1236,26 +1159,28 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-16}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: 
'{"componentRef":{"name":"comp-for-loop-16"},"inputs":{"parameters":{"pipelinechannel--loop_parameter-loop-item":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item"},"pipelinechannel--loop_parameter-loop-item-subvar-B_b":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"B_b\"]"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop_parameter-loop-item-subvar-B_b-loop-item","items":{"inputParameter":"pipelinechannel--loop_parameter-loop-item-subvar-B_b"}},"taskInfo":{"name":"for-loop-16"}}' + - name: task-name + value: for-loop-16 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-16-iteration + template: comp-for-loop-16 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-16-for-loop-16-iterator outputs: {} @@ -1263,13 +1188,13 @@ spec: tasks: - arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: for-loop-16 template: comp-for-loop-16-for-loop-16-iterator inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-15 outputs: {} @@ -1279,60 +1204,35 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-15}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-15"},"inputs":{"parameters":{"pipelinechannel--loop_parameter-loop-item":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item"},"pipelinechannel--loop_parameter-loop-item-subvar-A_a":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"A_a\"]"}}},"taskInfo":{"name":"condition-15"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--loop_parameter-loop-item-subvar-A_a''] == ''heads''"}}' - name: task-name value: condition-15 + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: condition-15-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-15-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-15-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-15-driver.outputs.parameters.condition}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: condition-15-driver.Succeeded name: condition-15 template: comp-condition-15 when: '{{tasks.condition-15-driver.outputs.parameters.condition}} != false' inputs: parameters: - - name: parent-dag-id - metadata: {} - name: comp-for-loop-14 - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-14}}' - - name: iteration-index - value: 
'{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-14"},"inputs":{"parameters":{"pipelinechannel--loop_parameter":{"componentInputParameter":"loop_parameter"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop_parameter-loop-item","items":{"inputParameter":"pipelinechannel--loop_parameter"}},"taskInfo":{"name":"for-loop-14"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-14 - inputs: - parameters: - - name: parent-dag-id + - name: parent-dag-task-id - name: iteration-index metadata: {} - name: comp-for-loop-14-iteration + name: comp-for-loop-14 outputs: {} - dag: tasks: @@ -1340,26 +1240,28 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-14}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-for-loop-14"},"inputs":{"parameters":{"pipelinechannel--loop_parameter":{"componentInputParameter":"loop_parameter"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop_parameter-loop-item","items":{"inputParameter":"pipelinechannel--loop_parameter"}},"taskInfo":{"name":"for-loop-14"}}' + - name: task-name + value: for-loop-14 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-14-iteration + template: comp-for-loop-14 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-14-for-loop-14-iterator outputs: {} @@ -1375,8 +1277,8 @@ spec: value: '{{workflow.parameters.implementations-ac68354549bad0bc7c4df15cdbcb5ecaacdd64a74d077c031e7c4c8d91aab247}}' - name: task-name value: args-generator-op - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: args-generator-op-driver template: system-container-driver - arguments: @@ -1393,8 +1295,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-1}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: 
'{"componentRef":{"name":"comp-condition-1"},"dependentTasks":["args-generator-op","flip-coin-op"],"inputs":{"parameters":{"pipelinechannel--args-generator-op-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"args-generator-op"}},"pipelinechannel--flip-coin-op-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin-op"}},"pipelinechannel--loop_parameter":{"componentInputParameter":"loop_parameter"},"pipelinechannel--msg":{"componentInputParameter":"msg"}}},"taskInfo":{"name":"condition-1"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-coin-op-Output''] != ''no-such-result''"}}' @@ -1405,8 +1307,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-1-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-1-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-1-driver.outputs.parameters.condition}}' depends: condition-1-driver.Succeeded @@ -1423,8 +1325,8 @@ spec: value: '{{workflow.parameters.implementations-663f234c274873c6b1c76e2f91561ee25169ff505cfe45310e51e4b0265a2d98}}' - name: task-name value: flip-coin-op - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: flip-coin-op-driver template: system-container-driver - arguments: @@ -1439,13 +1341,13 @@ spec: template: system-container-executor - arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: for-loop-14 template: comp-for-loop-14-for-loop-14-iterator inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -1463,8 +1365,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/pipeline_with_metadata_fields.yaml b/test_data/compiled-workflows/pipeline_with_metadata_fields.yaml index 736fcb608e7..c5d10d087ba 100644 --- a/test_data/compiled-workflows/pipeline_with_metadata_fields.yaml +++ b/test_data/compiled-workflows/pipeline_with_metadata_fields.yaml @@ -72,8 +72,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -100,6 +100,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -109,13 +122,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -137,6 +154,14 @@ spec: 
valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -166,6 +191,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -176,6 +205,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -215,6 +247,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -239,8 +278,8 @@ spec: value: '{{workflow.parameters.implementations-e64230eee42e89b7bed44c01e0e5aadf92c2b123bf1ae03372c1e07eeadee7ff}}' - name: task-name value: dataset-joiner - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: str-to-dataset.Succeeded name: dataset-joiner-driver template: system-container-driver @@ -264,8 +303,8 @@ spec: value: '{{workflow.parameters.implementations-0067337b015ccea33fdb5b6603365552f4cca4f9eae4b1497bd033c4b3f843b9}}' - name: task-name value: str-to-dataset - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: str-to-dataset-driver template: system-container-driver - arguments: @@ -280,7 +319,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -296,8 +335,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -308,8 +347,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -322,6 +361,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -331,6 +383,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -340,8 +396,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -350,9 +406,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count 
valueFrom: default: "0" @@ -361,6 +417,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -375,8 +439,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/pipeline_with_metrics_outputs.yaml b/test_data/compiled-workflows/pipeline_with_metrics_outputs.yaml index ea3242719c8..da75a9b0186 100644 --- a/test_data/compiled-workflows/pipeline_with_metrics_outputs.yaml +++ b/test_data/compiled-workflows/pipeline_with_metrics_outputs.yaml @@ -46,8 +46,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -74,6 +74,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -83,13 +96,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -111,6 +128,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -140,6 +165,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -150,6 +179,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -189,6 +221,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -213,8 +252,10 @@ spec: value: '{{workflow.parameters.implementations-ab5b53a52a65c78da1d8dcfca19f7568d6844ad951c971baa75d318f6528cd97}}' - name: task-name value: output-metrics-2 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: output-metrics-2-driver template: system-container-driver - arguments: @@ -224,12 +265,15 @@ spec: - default: "false" name: cached-decision value: '{{tasks.output-metrics-2-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: 
'{{inputs.parameters.iteration-index}}' depends: output-metrics-2-driver.Succeeded name: output-metrics-2 template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id + - name: iteration-index metadata: {} name: comp-for-loop-2 outputs: {} @@ -245,8 +289,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -257,8 +301,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -271,6 +315,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -280,6 +337,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -289,8 +350,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -299,9 +360,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -310,64 +371,43 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-2}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, - 2]"}},"taskInfo":{"name":"for-loop-2"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-2 - inputs: - parameters: - - name: parent-dag-id - - name: iteration-index - metadata: {} - name: comp-for-loop-2-iteration - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-2}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: 
'{"componentRef":{"name":"comp-for-loop-2"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[1, 2]"}},"taskInfo":{"name":"for-loop-2"}}' + - name: task-name + value: for-loop-2 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-2-iteration + template: comp-for-loop-2 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-2-for-loop-2-iterator outputs: {} @@ -375,8 +415,8 @@ spec: tasks: - arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: for-loop-2 template: comp-for-loop-2-for-loop-2-iterator - arguments: @@ -389,8 +429,8 @@ spec: value: '{{workflow.parameters.implementations-ab5b53a52a65c78da1d8dcfca19f7568d6844ad951c971baa75d318f6528cd97}}' - name: task-name value: output-metrics - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: output-metrics-driver template: system-container-driver - arguments: @@ -405,7 +445,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -423,8 +463,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/pipeline_with_multiple_exit_handlers.yaml b/test_data/compiled-workflows/pipeline_with_multiple_exit_handlers.yaml index e28c8a67071..a7b6e6e4006 100644 --- a/test_data/compiled-workflows/pipeline_with_multiple_exit_handlers.yaml +++ b/test_data/compiled-workflows/pipeline_with_multiple_exit_handlers.yaml @@ -65,8 +65,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -93,6 +93,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -102,13 +115,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -130,6 +147,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - 
serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -159,6 +184,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -169,6 +198,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -208,6 +240,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -233,8 +272,8 @@ spec: value: '{{workflow.parameters.implementations-0e20296d1c94dd957c12f688261536d608c0a9430b71b2c67c664244a11161e8}}' - name: task-name value: fail-op - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: fail-op-driver template: system-container-driver - arguments: @@ -257,8 +296,8 @@ spec: value: '{{workflow.parameters.implementations-af17df745243f885d9854f1931988534c66b26d6c60c5279e354d6b3480dceb8}}' - name: task-name value: print-op-2 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-op-2-driver template: system-container-driver - arguments: @@ -273,7 +312,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-exit-handler-1 outputs: {} @@ -289,8 +328,8 @@ spec: value: '{{workflow.parameters.implementations-af17df745243f885d9854f1931988534c66b26d6c60c5279e354d6b3480dceb8}}' - name: task-name value: print-op-4 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-op-4-driver template: system-container-driver - arguments: @@ -305,7 +344,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-exit-handler-2 outputs: {} @@ -321,8 +360,8 @@ spec: value: '{{workflow.parameters.implementations-af17df745243f885d9854f1931988534c66b26d6c60c5279e354d6b3480dceb8}}' - name: task-name value: print-op-6 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-op-6-driver template: system-container-driver - arguments: @@ -337,7 +376,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-exit-handler-3 outputs: {} @@ -354,8 +393,8 @@ spec: value: '{{workflow.parameters.implementations-af17df745243f885d9854f1931988534c66b26d6c60c5279e354d6b3480dceb8}}' - name: task-name value: print-op - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-op-driver template: system-container-driver - arguments: @@ -370,7 +409,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: exit-hook-root-print-op outputs: {} @@ 
-387,8 +426,8 @@ spec: value: '{{workflow.parameters.implementations-af17df745243f885d9854f1931988534c66b26d6c60c5279e354d6b3480dceb8}}' - name: task-name value: print-op-3 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-op-3-driver template: system-container-driver - arguments: @@ -403,7 +442,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: exit-hook-root-print-op-3 outputs: {} @@ -420,8 +459,8 @@ spec: value: '{{workflow.parameters.implementations-af17df745243f885d9854f1931988534c66b26d6c60c5279e354d6b3480dceb8}}' - name: task-name value: print-op-5 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-op-5-driver template: system-container-driver - arguments: @@ -436,7 +475,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: exit-hook-root-print-op-5 outputs: {} @@ -452,8 +491,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -464,8 +503,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -478,6 +517,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -487,6 +539,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -496,8 +552,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -506,9 +562,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -517,14 +573,22 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: parameters: - name: component value: '{{workflow.parameters.components-comp-exit-handler-1}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-exit-handler-1"},"inputs":{"parameters":{"pipelinechannel--message":{"componentInputParameter":"message"}}},"taskInfo":{"name":"exit-handler-1"}}' - name: task-name @@ 
-533,8 +597,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.exit-handler-1-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.exit-handler-1-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.exit-handler-1-driver.outputs.parameters.condition}}' depends: exit-handler-1-driver.Succeeded @@ -542,8 +606,8 @@ spec: exit: arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' template: exit-hook-root-print-op name: exit-handler-1 template: comp-exit-handler-1 @@ -551,8 +615,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-exit-handler-2}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-exit-handler-2"},"inputs":{"parameters":{"pipelinechannel--message":{"componentInputParameter":"message"}}},"taskInfo":{"name":"exit-handler-2"}}' - name: task-name @@ -561,8 +625,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.exit-handler-2-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.exit-handler-2-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.exit-handler-2-driver.outputs.parameters.condition}}' depends: exit-handler-2-driver.Succeeded @@ -570,8 +634,8 @@ spec: exit: arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' template: exit-hook-root-print-op-3 name: exit-handler-2 template: comp-exit-handler-2 @@ -579,8 +643,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-exit-handler-3}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-exit-handler-3"},"inputs":{"parameters":{"pipelinechannel--message":{"componentInputParameter":"message"}}},"taskInfo":{"name":"exit-handler-3"}}' - name: task-name @@ -589,8 +653,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.exit-handler-3-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.exit-handler-3-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.exit-handler-3-driver.outputs.parameters.condition}}' depends: exit-handler-3-driver.Succeeded @@ -598,14 +662,14 @@ spec: exit: arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' template: exit-hook-root-print-op-5 name: exit-handler-3 template: comp-exit-handler-3 inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -623,8 +687,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: 
root-driver.Succeeded diff --git a/test_data/compiled-workflows/pipeline_with_nested_conditions.yaml b/test_data/compiled-workflows/pipeline_with_nested_conditions.yaml index 016fc3dccd5..0ce47d4e57a 100644 --- a/test_data/compiled-workflows/pipeline_with_nested_conditions.yaml +++ b/test_data/compiled-workflows/pipeline_with_nested_conditions.yaml @@ -62,8 +62,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -90,6 +90,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -99,13 +112,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -127,6 +144,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -156,6 +181,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -166,6 +195,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -205,6 +237,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -229,8 +268,8 @@ spec: value: '{{workflow.parameters.implementations-663f234c274873c6b1c76e2f91561ee25169ff505cfe45310e51e4b0265a2d98}}' - name: task-name value: flip-coin-op-4 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: flip-coin-op-4-driver template: system-container-driver - arguments: @@ -253,8 +292,8 @@ spec: value: '{{workflow.parameters.implementations-0a97e2e381526d8572c8a0a776550e66305543ba3a2032eb60de0ef3e42504b5}}' - name: task-name value: print-op-4 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: flip-coin-op-4.Succeeded name: print-op-4-driver template: system-container-driver @@ -270,7 +309,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-2 outputs: {} @@ -286,8 +325,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ 
-298,8 +337,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -312,6 +351,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -321,6 +373,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -330,8 +386,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -340,9 +396,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -351,14 +407,22 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-2}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-2"},"dependentTasks":["flip-coin-op-3"],"inputs":{"parameters":{"pipelinechannel--flip-coin-op-2-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-2-Output"},"pipelinechannel--flip-coin-op-3-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin-op-3"}},"pipelinechannel--flip-coin-op-Output":{"componentInputParameter":"pipelinechannel--flip-coin-op-Output"}}},"taskInfo":{"name":"condition-2"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-coin-op-2-Output''] == inputs.parameter_values[''pipelinechannel--flip-coin-op-3-Output'']"}}' @@ -369,8 +433,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-2-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-2-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-2-driver.outputs.parameters.condition}}' depends: condition-2-driver.Succeeded @@ -387,8 +451,8 @@ spec: value: '{{workflow.parameters.implementations-663f234c274873c6b1c76e2f91561ee25169ff505cfe45310e51e4b0265a2d98}}' - name: task-name value: flip-coin-op-3 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: flip-coin-op-3-driver template: system-container-driver - arguments: @@ -411,8 +475,8 @@ spec: value: '{{workflow.parameters.implementations-0a97e2e381526d8572c8a0a776550e66305543ba3a2032eb60de0ef3e42504b5}}' - name: task-name value: print-op-3 - 
- name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: flip-coin-op-3.Succeeded name: print-op-3-driver template: system-container-driver @@ -428,7 +492,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-1 outputs: {} @@ -438,8 +502,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-condition-1}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-condition-1"},"dependentTasks":["flip-coin-op","flip-coin-op-2"],"inputs":{"parameters":{"pipelinechannel--flip-coin-op-2-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin-op-2"}},"pipelinechannel--flip-coin-op-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"flip-coin-op"}}}},"taskInfo":{"name":"condition-1"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-coin-op-Output''] != ''no-such-result''"}}' @@ -450,8 +514,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.condition-1-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.condition-1-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.condition-1-driver.outputs.parameters.condition}}' depends: condition-1-driver.Succeeded @@ -468,8 +532,8 @@ spec: value: '{{workflow.parameters.implementations-663f234c274873c6b1c76e2f91561ee25169ff505cfe45310e51e4b0265a2d98}}' - name: task-name value: flip-coin-op - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: flip-coin-op-driver template: system-container-driver - arguments: @@ -492,8 +556,8 @@ spec: value: '{{workflow.parameters.implementations-663f234c274873c6b1c76e2f91561ee25169ff505cfe45310e51e4b0265a2d98}}' - name: task-name value: flip-coin-op-2 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: flip-coin-op-2-driver template: system-container-driver - arguments: @@ -516,8 +580,8 @@ spec: value: '{{workflow.parameters.implementations-0a97e2e381526d8572c8a0a776550e66305543ba3a2032eb60de0ef3e42504b5}}' - name: task-name value: print-op - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: flip-coin-op.Succeeded name: print-op-driver template: system-container-driver @@ -541,8 +605,8 @@ spec: value: '{{workflow.parameters.implementations-0a97e2e381526d8572c8a0a776550e66305543ba3a2032eb60de0ef3e42504b5}}' - name: task-name value: print-op-2 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: flip-coin-op-2.Succeeded name: print-op-2-driver template: system-container-driver @@ -558,7 +622,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -576,8 +640,8 @@ spec: template: system-dag-driver - arguments: parameters: - - 
name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/pipeline_with_nested_conditions_yaml.yaml b/test_data/compiled-workflows/pipeline_with_nested_conditions_yaml.yaml index 3d3c2ffb11a..7039df46f26 100644 --- a/test_data/compiled-workflows/pipeline_with_nested_conditions_yaml.yaml +++ b/test_data/compiled-workflows/pipeline_with_nested_conditions_yaml.yaml @@ -74,8 +74,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -102,6 +102,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -111,13 +124,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -139,6 +156,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -168,6 +193,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -178,6 +207,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -217,6 +249,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -243,8 +282,8 @@ spec: value: '{{workflow.parameters.implementations-2c7a24d85a90f8909bcf825238cdd7bc3d0cc7d64cb2978ecd973730a39452a8}}' - name: task-name value: print - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-driver template: system-container-driver - arguments: @@ -259,7 +298,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-condition-2 outputs: {} @@ -277,8 +316,8 @@ spec: value: '{{workflow.parameters.implementations-2c7a24d85a90f8909bcf825238cdd7bc3d0cc7d64cb2978ecd973730a39452a8}}' - name: task-name value: print-2 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-2-driver template: system-container-driver - arguments: @@ -293,7 +332,7 @@ spec: template: system-container-executor 
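# NOTE (sketch drawn from the surrounding hunks, not part of the
# compiled fixtures): each kfp-driver and launcher container now gets
# the same identity and auth plumbing: pod identity via the downward
# API, plus a projected service-account token for the
# pipelines.kubeflow.org audience mounted read-only at
# /var/run/secrets/kfp. The recurring fragment (see the
# system-dag-driver hunks that follow) is:
#
#   env:
#   - name: KFP_POD_NAME
#     valueFrom:
#       fieldRef:
#         fieldPath: metadata.name
#   - name: KFP_POD_UID
#     valueFrom:
#       fieldRef:
#         fieldPath: metadata.uid
#   - name: NAMESPACE
#     valueFrom:
#       fieldRef:
#         fieldPath: metadata.namespace
#   volumeMounts:
#   - mountPath: /var/run/secrets/kfp
#     name: kfp-launcher-token
#     readOnly: true
#   volumes:
#   - name: kfp-launcher-token
#     projected:
#       sources:
#       - serviceAccountToken:
#           audience: pipelines.kubeflow.org
#           expirationSeconds: 7200
#           path: token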
    inputs:
      parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
     metadata: {}
     name: comp-condition-3
     outputs: {}
@@ -309,8 +348,8 @@
       - '{{workflow.name}}'
       - --run_display_name
       - ""
-      - --dag_execution_id
-      - '{{inputs.parameters.parent-dag-id}}'
+      - --parent_task_id
+      - '{{inputs.parameters.parent-dag-task-id}}'
       - --component
       - '{{inputs.parameters.component}}'
       - --task
@@ -321,8 +360,8 @@
       - '{{inputs.parameters.runtime-config}}'
       - --iteration_index
       - '{{inputs.parameters.iteration-index}}'
-      - --execution_id_path
-      - '{{outputs.parameters.execution-id.path}}'
+      - --parent_task_id_path
+      - '{{outputs.parameters.parent-dag-task-id-path.path}}'
       - --iteration_count_path
       - '{{outputs.parameters.iteration-count.path}}'
       - --condition_path
@@ -335,6 +374,19 @@
       - ""
       command:
       - driver
+      env:
+      - name: KFP_POD_NAME
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.name
+      - name: KFP_POD_UID
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       image: ghcr.io/kubeflow/kfp-driver:latest
       name: ""
       resources:
@@ -344,6 +396,10 @@
       requests:
         cpu: 100m
         memory: 64Mi
+      volumeMounts:
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
     inputs:
       parameters:
       - name: component
@@ -353,8 +409,8 @@
         name: task
       - default: ""
         name: task-name
-      - default: "0"
-        name: parent-dag-id
+      - default: ""
+        name: parent-dag-task-id
       - default: "-1"
         name: iteration-index
       - default: DAG
@@ -363,9 +419,9 @@
     name: system-dag-driver
     outputs:
       parameters:
-      - name: execution-id
+      - name: parent-dag-task-id-path
         valueFrom:
-          path: /tmp/outputs/execution-id
+          path: /tmp/outputs/task-id
       - name: iteration-count
         valueFrom:
           default: "0"
@@ -374,14 +430,22 @@
         valueFrom:
           default: "true"
           path: /tmp/outputs/condition
+    volumes:
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
   - dag:
       tasks:
       - arguments:
           parameters:
           - name: component
             value: '{{workflow.parameters.components-comp-condition-2}}'
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
           - name: task
             value: '{"componentRef":{"name":"comp-condition-2"},"dependentTasks":["generate-random-number"],"inputs":{"parameters":{"pipelinechannel--flip-coin-output":{"componentInputParameter":"pipelinechannel--flip-coin-output"},"pipelinechannel--generate-random-number-output":{"taskOutputParameter":{"outputParameterKey":"output","producerTask":"generate-random-number"}}}},"taskInfo":{"name":"condition-2"},"triggerPolicy":{"condition":"int(inputs.parameter_values[''pipelinechannel--generate-random-number-output'']) \u003e 5"}}'
@@ -392,8 +456,8 @@
         template: system-dag-driver
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{tasks.condition-2-driver.outputs.parameters.execution-id}}'
+          - name: parent-dag-task-id
+            value: '{{tasks.condition-2-driver.outputs.parameters.parent-dag-task-id-path}}'
           - name: condition
             value: '{{tasks.condition-2-driver.outputs.parameters.condition}}'
         depends: condition-2-driver.Succeeded
@@ -404,8 +468,8 @@
           parameters:
           - name: component
             value: '{{workflow.parameters.components-comp-condition-3}}'
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
           - name: task
             value: '{"componentRef":{"name":"comp-condition-3"},"dependentTasks":["generate-random-number"],"inputs":{"parameters":{"pipelinechannel--flip-coin-output":{"componentInputParameter":"pipelinechannel--flip-coin-output"},"pipelinechannel--generate-random-number-output":{"taskOutputParameter":{"outputParameterKey":"output","producerTask":"generate-random-number"}}}},"taskInfo":{"name":"condition-3"},"triggerPolicy":{"condition":"int(inputs.parameter_values[''pipelinechannel--generate-random-number-output'']) \u003c= 5"}}'
@@ -416,8 +480,8 @@
         template: system-dag-driver
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{tasks.condition-3-driver.outputs.parameters.execution-id}}'
+          - name: parent-dag-task-id
+            value: '{{tasks.condition-3-driver.outputs.parameters.parent-dag-task-id-path}}'
           - name: condition
             value: '{{tasks.condition-3-driver.outputs.parameters.condition}}'
         depends: condition-3-driver.Succeeded
@@ -434,8 +498,8 @@
             value: '{{workflow.parameters.implementations-789e9158be0ce0b20b6e4fa73b0cecf695bedfb779d240bbf5774c468c94d421}}'
           - name: task-name
             value: generate-random-number
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         name: generate-random-number-driver
         template: system-container-driver
       - arguments:
@@ -450,7 +514,7 @@
         template: system-container-executor
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
     metadata: {}
     name: comp-condition-1
     outputs: {}
@@ -468,8 +532,8 @@
             value: '{{workflow.parameters.implementations-2c7a24d85a90f8909bcf825238cdd7bc3d0cc7d64cb2978ecd973730a39452a8}}'
           - name: task-name
             value: print-3
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         name: print-3-driver
         template: system-container-driver
       - arguments:
@@ -484,7 +548,7 @@
         template: system-container-executor
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
     metadata: {}
     name: comp-condition-5
     outputs: {}
@@ -502,8 +566,8 @@
             value: '{{workflow.parameters.implementations-2c7a24d85a90f8909bcf825238cdd7bc3d0cc7d64cb2978ecd973730a39452a8}}'
           - name: task-name
             value: print-4
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         name: print-4-driver
         template: system-container-driver
       - arguments:
@@ -518,7 +582,7 @@
         template: system-container-executor
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
     metadata: {}
     name: comp-condition-6
     outputs: {}
@@ -528,8 +592,8 @@
           parameters:
           - name: component
             value: '{{workflow.parameters.components-comp-condition-5}}'
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
           - name: task
             value: '{"componentRef":{"name":"comp-condition-5"},"dependentTasks":["generate-random-number-2"],"inputs":{"parameters":{"pipelinechannel--flip-coin-output":{"componentInputParameter":"pipelinechannel--flip-coin-output"},"pipelinechannel--generate-random-number-2-output":{"taskOutputParameter":{"outputParameterKey":"output","producerTask":"generate-random-number-2"}}}},"taskInfo":{"name":"condition-5"},"triggerPolicy":{"condition":"int(inputs.parameter_values[''pipelinechannel--generate-random-number-2-output'']) \u003e 15"}}'
@@ -540,8 +604,8 @@
         template: system-dag-driver
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{tasks.condition-5-driver.outputs.parameters.execution-id}}'
+          - name: parent-dag-task-id
+            value: '{{tasks.condition-5-driver.outputs.parameters.parent-dag-task-id-path}}'
           - name: condition
             value: '{{tasks.condition-5-driver.outputs.parameters.condition}}'
         depends: condition-5-driver.Succeeded
@@ -552,8 +616,8 @@
           parameters:
           - name: component
             value: '{{workflow.parameters.components-comp-condition-6}}'
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
           - name: task
             value: '{"componentRef":{"name":"comp-condition-6"},"dependentTasks":["generate-random-number-2"],"inputs":{"parameters":{"pipelinechannel--flip-coin-output":{"componentInputParameter":"pipelinechannel--flip-coin-output"},"pipelinechannel--generate-random-number-2-output":{"taskOutputParameter":{"outputParameterKey":"output","producerTask":"generate-random-number-2"}}}},"taskInfo":{"name":"condition-6"},"triggerPolicy":{"condition":"int(inputs.parameter_values[''pipelinechannel--generate-random-number-2-output'']) \u003c= 15"}}'
@@ -564,8 +628,8 @@
         template: system-dag-driver
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{tasks.condition-6-driver.outputs.parameters.execution-id}}'
+          - name: parent-dag-task-id
+            value: '{{tasks.condition-6-driver.outputs.parameters.parent-dag-task-id-path}}'
           - name: condition
             value: '{{tasks.condition-6-driver.outputs.parameters.condition}}'
         depends: condition-6-driver.Succeeded
@@ -582,8 +646,8 @@
             value: '{{workflow.parameters.implementations-fa0ad72f3897e2b1e2e1ce0644d93bd42b4bb665a29b5acdd23005c215f2c8fc}}'
           - name: task-name
             value: generate-random-number-2
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         name: generate-random-number-2-driver
         template: system-container-driver
       - arguments:
@@ -598,7 +662,7 @@
         template: system-container-executor
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
     metadata: {}
     name: comp-condition-4
     outputs: {}
@@ -608,8 +672,8 @@
           parameters:
           - name: component
             value: '{{workflow.parameters.components-comp-condition-1}}'
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
           - name: task
             value: '{"componentRef":{"name":"comp-condition-1"},"dependentTasks":["flip-coin"],"inputs":{"parameters":{"pipelinechannel--flip-coin-output":{"taskOutputParameter":{"outputParameterKey":"output","producerTask":"flip-coin"}}}},"taskInfo":{"name":"condition-1"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-coin-output''] == ''heads''"}}'
@@ -620,8 +684,8 @@
         template: system-dag-driver
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{tasks.condition-1-driver.outputs.parameters.execution-id}}'
+          - name: parent-dag-task-id
+            value: '{{tasks.condition-1-driver.outputs.parameters.parent-dag-task-id-path}}'
           - name: condition
             value: '{{tasks.condition-1-driver.outputs.parameters.condition}}'
         depends: condition-1-driver.Succeeded
@@ -632,8 +696,8 @@
           parameters:
           - name: component
             value: '{{workflow.parameters.components-comp-condition-4}}'
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
           - name: task
             value: '{"componentRef":{"name":"comp-condition-4"},"dependentTasks":["flip-coin"],"inputs":{"parameters":{"pipelinechannel--flip-coin-output":{"taskOutputParameter":{"outputParameterKey":"output","producerTask":"flip-coin"}}}},"taskInfo":{"name":"condition-4"},"triggerPolicy":{"condition":"inputs.parameter_values[''pipelinechannel--flip-coin-output''] == ''tails''"}}'
@@ -644,8 +708,8 @@
         template: system-dag-driver
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{tasks.condition-4-driver.outputs.parameters.execution-id}}'
+          - name: parent-dag-task-id
+            value: '{{tasks.condition-4-driver.outputs.parameters.parent-dag-task-id-path}}'
           - name: condition
             value: '{{tasks.condition-4-driver.outputs.parameters.condition}}'
         depends: condition-4-driver.Succeeded
@@ -662,8 +726,8 @@
             value: '{{workflow.parameters.implementations-4a03f280b1b5aa6f490a19d8d2607c2a76f999db6f303a0716d80a3e8abd97d6}}'
           - name: task-name
             value: flip-coin
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         name: flip-coin-driver
         template: system-container-driver
       - arguments:
@@ -678,7 +742,7 @@
         template: system-container-executor
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
     metadata: {}
     name: root
     outputs: {}
@@ -696,8 +760,8 @@
         template: system-dag-driver
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{tasks.root-driver.outputs.parameters.execution-id}}'
+          - name: parent-dag-task-id
+            value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}'
           - name: condition
             value: ""
         depends: root-driver.Succeeded
diff --git a/test_data/compiled-workflows/pipeline_with_nested_loops.yaml b/test_data/compiled-workflows/pipeline_with_nested_loops.yaml
index 2e06dc3d1dd..f179f2ca450 100644
--- a/test_data/compiled-workflows/pipeline_with_nested_loops.yaml
+++ b/test_data/compiled-workflows/pipeline_with_nested_loops.yaml
@@ -52,8 +52,8 @@ spec:
       - '{{workflow.name}}'
       - --run_display_name
       - ""
-      - --dag_execution_id
-      - '{{inputs.parameters.parent-dag-id}}'
+      - --parent_task_id
+      - '{{inputs.parameters.parent-dag-task-id}}'
       - --component
       - '{{inputs.parameters.component}}'
       - --task
@@ -80,6 +80,19 @@ spec:
       - ""
       command:
       - driver
+      env:
+      - name: KFP_POD_NAME
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.name
+      - name: KFP_POD_UID
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       image: ghcr.io/kubeflow/kfp-driver:latest
       name: ""
       resources:
@@ -89,13 +102,17 @@
       requests:
         cpu: 100m
         memory: 64Mi
+      volumeMounts:
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
     inputs:
       parameters:
       - name: component
       - name: task
       - name: container
       - name: task-name
-      - name: parent-dag-id
+      - name: parent-dag-task-id
       - default: "-1"
         name: iteration-index
       - default: ""
@@ -117,6 +134,14 @@
         valueFrom:
           default: "true"
           path: /tmp/outputs/condition
+    volumes:
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
   - dag:
       tasks:
       - arguments:
@@ -146,6 +171,10 @@
         valueFrom:
          fieldRef:
            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       envFrom:
       - configMapRef:
           name: metadata-grpc-configmap
@@ -156,6 +185,9 @@
       volumeMounts:
       - mountPath: /kfp-launcher
         name: kfp-launcher
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
       - mountPath: /gcs
         name: gcs-scratch
       - mountPath: /s3
@@ -195,6 +227,13 @@
     volumes:
     - emptyDir: {}
       name: kfp-launcher
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
     - emptyDir: {}
       name: gcs-scratch
     - emptyDir: {}
@@ -219,8 +258,10 @@
             value: '{{workflow.parameters.implementations-8fce140f5724af7f5973bdf372e2b23f2cbf1b9e99ac8244a5ec5fa13a3bce77}}'
           - name: task-name
             value: print-op
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         name: print-op-driver
         template: system-container-driver
       - arguments:
@@ -230,12 +271,15 @@
           - default: "false"
             name: cached-decision
             value: '{{tasks.print-op-driver.outputs.parameters.cached-decision}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         depends: print-op-driver.Succeeded
         name: print-op
         template: system-container-executor
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
+      - name: iteration-index
     metadata: {}
     name: comp-for-loop-2
     outputs: {}
@@ -251,8 +295,8 @@
       - '{{workflow.name}}'
       - --run_display_name
       - ""
-      - --dag_execution_id
-      - '{{inputs.parameters.parent-dag-id}}'
+      - --parent_task_id
+      - '{{inputs.parameters.parent-dag-task-id}}'
       - --component
       - '{{inputs.parameters.component}}'
       - --task
@@ -263,8 +307,8 @@
       - '{{inputs.parameters.runtime-config}}'
       - --iteration_index
       - '{{inputs.parameters.iteration-index}}'
-      - --execution_id_path
-      - '{{outputs.parameters.execution-id.path}}'
+      - --parent_task_id_path
+      - '{{outputs.parameters.parent-dag-task-id-path.path}}'
       - --iteration_count_path
       - '{{outputs.parameters.iteration-count.path}}'
       - --condition_path
@@ -277,6 +321,19 @@
       - ""
       command:
       - driver
+      env:
+      - name: KFP_POD_NAME
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.name
+      - name: KFP_POD_UID
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       image: ghcr.io/kubeflow/kfp-driver:latest
       name: ""
       resources:
@@ -286,6 +343,10 @@
       requests:
         cpu: 100m
         memory: 64Mi
+      volumeMounts:
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
     inputs:
       parameters:
       - name: component
@@ -295,8 +356,8 @@
         name: task
       - default: ""
         name: task-name
-      - default: "0"
-        name: parent-dag-id
+      - default: ""
+        name: parent-dag-task-id
       - default: "-1"
         name: iteration-index
       - default: DAG
@@ -305,9 +366,9 @@
     name: system-dag-driver
     outputs:
      parameters:
-      - name: execution-id
+      - name: parent-dag-task-id-path
         valueFrom:
-          path: /tmp/outputs/execution-id
+          path: /tmp/outputs/task-id
       - name: iteration-count
         valueFrom:
           default: "0"
@@ -316,62 +377,42 @@
         valueFrom:
           default: "true"
           path: /tmp/outputs/condition
+    volumes:
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
   - dag:
       tasks:
       - arguments:
           parameters:
           - name: component
             value: '{{workflow.parameters.components-comp-for-loop-2}}'
-          - name: iteration-index
-            value: '{{inputs.parameters.iteration-index}}'
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
-          - name: task
-            value: '{"componentRef":{"name":"comp-for-loop-2"},"inputs":{"parameters":{"pipelinechannel--loop_parameter-loop-item":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item"},"pipelinechannel--loop_parameter-loop-item-subvar-p_a":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"p_a\"]"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop_parameter-loop-item-subvar-p_a-loop-item","items":{"inputParameter":"pipelinechannel--loop_parameter-loop-item-subvar-p_a"}},"taskInfo":{"name":"for-loop-2"}}'
-        name: iteration-item-driver
-        template: system-dag-driver
-      - arguments:
-          parameters:
-          - name: parent-dag-id
-            value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}'
-          - name: condition
-            value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}'
-        depends: iteration-item-driver.Succeeded
-        name: iteration-item
-        template: comp-for-loop-2
-    inputs:
-      parameters:
-      - name: parent-dag-id
-      - name: iteration-index
-    metadata: {}
-    name: comp-for-loop-2-iteration
-    outputs: {}
-  - dag:
-      tasks:
-      - arguments:
-          parameters:
-          - name: component
-            value: '{{workflow.parameters.components-comp-for-loop-2}}'
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
           - name: task
             value: '{"componentRef":{"name":"comp-for-loop-2"},"inputs":{"parameters":{"pipelinechannel--loop_parameter-loop-item":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item"},"pipelinechannel--loop_parameter-loop-item-subvar-p_a":{"componentInputParameter":"pipelinechannel--loop_parameter-loop-item","parameterExpressionSelector":"parseJson(string_value)[\"p_a\"]"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop_parameter-loop-item-subvar-p_a-loop-item","items":{"inputParameter":"pipelinechannel--loop_parameter-loop-item-subvar-p_a"}},"taskInfo":{"name":"for-loop-2"}}'
+          - name: task-name
+            value: for-loop-2
         name: iteration-driver
         template: system-dag-driver
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}'
+          - name: parent-dag-task-id
+            value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}'
           - name: iteration-index
            value: '{{item}}'
         depends: iteration-driver.Succeeded
         name: iteration-iterations
-        template: comp-for-loop-2-iteration
+        template: comp-for-loop-2
         withSequence:
           count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}'
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
     metadata: {}
     name: comp-for-loop-2-for-loop-2-iterator
     outputs: {}
@@ -379,13 +420,16 @@
       tasks:
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         name: for-loop-2
         template: comp-for-loop-2-for-loop-2-iterator
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
+      - name: iteration-index
     metadata: {}
     name: comp-for-loop-1
     outputs: {}
@@ -401,8 +445,10 @@
             value: '{{workflow.parameters.implementations-8fce140f5724af7f5973bdf372e2b23f2cbf1b9e99ac8244a5ec5fa13a3bce77}}'
           - name: task-name
             value: print-op-3
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         name: print-op-3-driver
         template: system-container-driver
       - arguments:
@@ -412,45 +458,17 @@
           - default: "false"
             name: cached-decision
             value: '{{tasks.print-op-3-driver.outputs.parameters.cached-decision}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         depends: print-op-3-driver.Succeeded
         name: print-op-3
         template: system-container-executor
     inputs:
       parameters:
-      - name: parent-dag-id
-    metadata: {}
-    name: comp-for-loop-6
-    outputs: {}
-  - dag:
-      tasks:
-      - arguments:
-          parameters:
-          - name: component
-            value: '{{workflow.parameters.components-comp-for-loop-6}}'
-          - name: iteration-index
-            value: '{{inputs.parameters.iteration-index}}'
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
-          - name: task
-            value: '{"componentRef":{"name":"comp-for-loop-6"},"inputs":{"parameters":{"pipelinechannel--loop-item-param-3":{"componentInputParameter":"pipelinechannel--loop-item-param-3"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-5","items":{"raw":"[\"100\", \"200\", \"300\"]"}},"taskInfo":{"name":"for-loop-6"}}'
-        name: iteration-item-driver
-        template: system-dag-driver
-      - arguments:
-          parameters:
-          - name: parent-dag-id
-            value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}'
-          - name: condition
-            value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}'
-        depends: iteration-item-driver.Succeeded
-        name: iteration-item
-        template: comp-for-loop-6
-    inputs:
-      parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
       - name: iteration-index
     metadata: {}
-    name: comp-for-loop-6-iteration
+    name: comp-for-loop-6
     outputs: {}
   - dag:
       tasks:
@@ -458,27 +476,29 @@
           parameters:
           - name: component
             value: '{{workflow.parameters.components-comp-for-loop-6}}'
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
           - name: task
             value: '{"componentRef":{"name":"comp-for-loop-6"},"inputs":{"parameters":{"pipelinechannel--loop-item-param-3":{"componentInputParameter":"pipelinechannel--loop-item-param-3"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-5","items":{"raw":"[\"100\", \"200\", \"300\"]"}},"taskInfo":{"name":"for-loop-6"}}'
+          - name: task-name
+            value: for-loop-6
         name: iteration-driver
         template: system-dag-driver
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}'
+          - name: parent-dag-task-id
+            value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}'
           - name: iteration-index
             value: '{{item}}'
         depends: iteration-driver.Succeeded
         name: iteration-iterations
-        template: comp-for-loop-6-iteration
+        template: comp-for-loop-6
         withSequence:
           count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}'
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
     metadata: {}
     name: comp-for-loop-6-for-loop-6-iterator
     outputs: {}
@@ -486,8 +506,10 @@
       tasks:
       - arguments:
          parameters:
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         name: for-loop-6
         template: comp-for-loop-6-for-loop-6-iterator
       - arguments:
@@ -500,8 +522,10 @@
             value: '{{workflow.parameters.implementations-8fce140f5724af7f5973bdf372e2b23f2cbf1b9e99ac8244a5ec5fa13a3bce77}}'
           - name: task-name
             value: print-op-2
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         name: print-op-2-driver
         template: system-container-driver
       - arguments:
@@ -511,44 +535,17 @@
           - default: "false"
             name: cached-decision
             value: '{{tasks.print-op-2-driver.outputs.parameters.cached-decision}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         depends: print-op-2-driver.Succeeded
         name: print-op-2
         template: system-container-executor
     inputs:
       parameters:
-      - name: parent-dag-id
-    metadata: {}
-    name: comp-for-loop-4
-    outputs: {}
-  - dag:
-      tasks:
-      - arguments:
-          parameters:
-          - name: component
-            value: '{{workflow.parameters.components-comp-for-loop-1}}'
-          - name: iteration-index
-            value: '{{inputs.parameters.iteration-index}}'
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
-          - name: task
-            value: '{"componentRef":{"name":"comp-for-loop-1"},"inputs":{"parameters":{"pipelinechannel--loop_parameter":{"componentInputParameter":"loop_parameter"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop_parameter-loop-item","items":{"inputParameter":"pipelinechannel--loop_parameter"}},"taskInfo":{"name":"for-loop-1"}}'
-        name: iteration-item-driver
-        template: system-dag-driver
-      - arguments:
-          parameters:
-          - name: parent-dag-id
-            value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}'
-          - name: condition
-            value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}'
-        depends: iteration-item-driver.Succeeded
-        name: iteration-item
-        template: comp-for-loop-1
-    inputs:
-      parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
       - name: iteration-index
     metadata: {}
-    name: comp-for-loop-1-iteration
+    name: comp-for-loop-4
     outputs: {}
   - dag:
       tasks:
@@ -556,26 +553,28 @@
           parameters:
          - name: component
             value: '{{workflow.parameters.components-comp-for-loop-1}}'
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
           - name: task
             value: '{"componentRef":{"name":"comp-for-loop-1"},"inputs":{"parameters":{"pipelinechannel--loop_parameter":{"componentInputParameter":"loop_parameter"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop_parameter-loop-item","items":{"inputParameter":"pipelinechannel--loop_parameter"}},"taskInfo":{"name":"for-loop-1"}}'
+          - name: task-name
+            value: for-loop-1
         name: iteration-driver
         template: system-dag-driver
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}'
+          - name: parent-dag-task-id
+            value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}'
           - name: iteration-index
             value: '{{item}}'
         depends: iteration-driver.Succeeded
         name: iteration-iterations
-        template: comp-for-loop-1-iteration
+        template: comp-for-loop-1
         withSequence:
           count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}'
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
     metadata: {}
     name: comp-for-loop-1-for-loop-1-iterator
     outputs: {}
@@ -585,58 +584,29 @@
           parameters:
           - name: component
             value: '{{workflow.parameters.components-comp-for-loop-4}}'
-          - name: iteration-index
-            value: '{{inputs.parameters.iteration-index}}'
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
-          - name: task
-            value: '{"componentRef":{"name":"comp-for-loop-4"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[\"1\", \"2\"]"}},"taskInfo":{"name":"for-loop-4"}}'
-        name: iteration-item-driver
-        template: system-dag-driver
-      - arguments:
-          parameters:
-          - name: parent-dag-id
-            value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}'
-          - name: condition
-            value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}'
-        depends: iteration-item-driver.Succeeded
-        name: iteration-item
-        template: comp-for-loop-4
-    inputs:
-      parameters:
-      - name: parent-dag-id
-      - name: iteration-index
-    metadata: {}
-    name: comp-for-loop-4-iteration
-    outputs: {}
-  - dag:
-      tasks:
-      - arguments:
-          parameters:
-          - name: component
-            value: '{{workflow.parameters.components-comp-for-loop-4}}'
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
           - name: task
             value: '{"componentRef":{"name":"comp-for-loop-4"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[\"1\", \"2\"]"}},"taskInfo":{"name":"for-loop-4"}}'
+          - name: task-name
+            value: for-loop-4
         name: iteration-driver
         template: system-dag-driver
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}'
+          - name: parent-dag-task-id
+            value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}'
           - name: iteration-index
             value: '{{item}}'
         depends: iteration-driver.Succeeded
         name: iteration-iterations
-        template: comp-for-loop-4-iteration
+        template: comp-for-loop-4
         withSequence:
           count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}'
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
     metadata: {}
     name: comp-for-loop-4-for-loop-4-iterator
     outputs: {}
@@ -644,19 +614,19 @@
       tasks:
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         name: for-loop-1
         template: comp-for-loop-1-for-loop-1-iterator
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         name: for-loop-4
         template: comp-for-loop-4-for-loop-4-iterator
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
     metadata: {}
     name: root
     outputs: {}
@@ -674,8 +644,8 @@
         template: system-dag-driver
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{tasks.root-driver.outputs.parameters.execution-id}}'
+          - name: parent-dag-task-id
+            value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}'
           - name: condition
             value: ""
         depends: root-driver.Succeeded
diff --git a/test_data/compiled-workflows/pipeline_with_only_display_name.yaml b/test_data/compiled-workflows/pipeline_with_only_display_name.yaml
index 43e442e28f3..62922db0a67 100644
--- a/test_data/compiled-workflows/pipeline_with_only_display_name.yaml
+++ b/test_data/compiled-workflows/pipeline_with_only_display_name.yaml
@@ -32,8 +32,8 @@ spec:
       - '{{workflow.name}}'
       - --run_display_name
       - ""
-      - --dag_execution_id
-      - '{{inputs.parameters.parent-dag-id}}'
+      - --parent_task_id
+      - '{{inputs.parameters.parent-dag-task-id}}'
       - --component
       - '{{inputs.parameters.component}}'
       - --task
@@ -60,6 +60,19 @@ spec:
       - ""
       command:
       - driver
+      env:
+      - name: KFP_POD_NAME
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.name
+      - name: KFP_POD_UID
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       image: ghcr.io/kubeflow/kfp-driver:latest
       name: ""
       resources:
@@ -69,13 +82,17 @@
       requests:
         cpu: 100m
         memory: 64Mi
+      volumeMounts:
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
     inputs:
       parameters:
       - name: component
       - name: task
       - name: container
       - name: task-name
-      - name: parent-dag-id
+      - name: parent-dag-task-id
       - default: "-1"
         name: iteration-index
       - default: ""
@@ -97,6 +114,14 @@
         valueFrom:
           default: "true"
           path: /tmp/outputs/condition
+    volumes:
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
   - dag:
       tasks:
       - arguments:
@@ -126,6 +151,10 @@
         valueFrom:
           fieldRef:
            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       envFrom:
       - configMapRef:
           name: metadata-grpc-configmap
@@ -136,6 +165,9 @@
       volumeMounts:
       - mountPath: /kfp-launcher
         name: kfp-launcher
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
       - mountPath: /gcs
         name: gcs-scratch
       - mountPath: /s3
@@ -175,6 +207,13 @@
     volumes:
     - emptyDir: {}
       name: kfp-launcher
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
     - emptyDir: {}
       name: gcs-scratch
     - emptyDir: {}
@@ -199,8 +238,8 @@
             value: '{{workflow.parameters.implementations-cf9c81ac9e6ab0dcdd92cb89ed717317e681cb0645cb5ddfc4824b1de14346b3}}'
           - name: task-name
             value: echo
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         name: echo-driver
         template: system-container-driver
       - arguments:
@@ -215,7 +254,7 @@
         template: system-container-executor
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
     metadata: {}
     name: root
     outputs: {}
@@ -231,8 +270,8 @@
       - '{{workflow.name}}'
       - --run_display_name
       - ""
-      - --dag_execution_id
-      - '{{inputs.parameters.parent-dag-id}}'
+      - --parent_task_id
+      - '{{inputs.parameters.parent-dag-task-id}}'
       - --component
       - '{{inputs.parameters.component}}'
       - --task
@@ -243,8 +282,8 @@
       - '{{inputs.parameters.runtime-config}}'
       - --iteration_index
       - '{{inputs.parameters.iteration-index}}'
-      - --execution_id_path
-      - '{{outputs.parameters.execution-id.path}}'
+      - --parent_task_id_path
+      - '{{outputs.parameters.parent-dag-task-id-path.path}}'
       - --iteration_count_path
       - '{{outputs.parameters.iteration-count.path}}'
       - --condition_path
@@ -257,6 +296,19 @@
       - ""
       command:
       - driver
+      env:
+      - name: KFP_POD_NAME
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.name
+      - name: KFP_POD_UID
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       image: ghcr.io/kubeflow/kfp-driver:latest
       name: ""
       resources:
@@ -266,6 +318,10 @@
       requests:
         cpu: 100m
         memory: 64Mi
+      volumeMounts:
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
     inputs:
       parameters:
       - name: component
@@ -275,8 +331,8 @@
         name: task
       - default: ""
         name: task-name
-      - default: "0"
-        name: parent-dag-id
+      - default: ""
+        name: parent-dag-task-id
       - default: "-1"
         name: iteration-index
       - default: DAG
@@ -285,9 +341,9 @@
     name: system-dag-driver
     outputs:
       parameters:
-      - name: execution-id
+      - name: parent-dag-task-id-path
         valueFrom:
-          path: /tmp/outputs/execution-id
+          path: /tmp/outputs/task-id
       - name: iteration-count
         valueFrom:
           default: "0"
@@ -296,6 +352,14 @@
         valueFrom:
           default: "true"
           path: /tmp/outputs/condition
+    volumes:
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
   - dag:
       tasks:
       - arguments:
@@ -310,8 +374,8 @@
         template: system-dag-driver
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{tasks.root-driver.outputs.parameters.execution-id}}'
+          - name: parent-dag-task-id
+            value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}'
           - name: condition
             value: ""
         depends: root-driver.Succeeded
diff --git a/test_data/compiled-workflows/pipeline_with_outputs.yaml b/test_data/compiled-workflows/pipeline_with_outputs.yaml
index 9fc10b45a82..edaf5a3c449 100644
--- a/test_data/compiled-workflows/pipeline_with_outputs.yaml
+++ b/test_data/compiled-workflows/pipeline_with_outputs.yaml
@@ -48,8 +48,8 @@ spec:
       - '{{workflow.name}}'
       - --run_display_name
       - ""
-      - --dag_execution_id
-      - '{{inputs.parameters.parent-dag-id}}'
+      - --parent_task_id
+      - '{{inputs.parameters.parent-dag-task-id}}'
       - --component
       - '{{inputs.parameters.component}}'
       - --task
@@ -76,6 +76,19 @@ spec:
       - ""
       command:
       - driver
+      env:
+      - name: KFP_POD_NAME
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.name
+      - name: KFP_POD_UID
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       image: ghcr.io/kubeflow/kfp-driver:latest
       name: ""
       resources:
@@ -85,13 +98,17 @@
       requests:
         cpu: 100m
         memory: 64Mi
+      volumeMounts:
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
     inputs:
       parameters:
       - name: component
       - name: task
       - name: container
       - name: task-name
-      - name: parent-dag-id
+      - name: parent-dag-task-id
       - default: "-1"
         name: iteration-index
       - default: ""
@@ -113,6 +130,14 @@
         valueFrom:
           default: "true"
           path: /tmp/outputs/condition
+    volumes:
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
   - dag:
       tasks:
       - arguments:
@@ -142,6 +167,10 @@
         valueFrom:
           fieldRef:
            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       envFrom:
       - configMapRef:
           name: metadata-grpc-configmap
@@ -152,6 +181,9 @@
       volumeMounts:
       - mountPath: /kfp-launcher
         name: kfp-launcher
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
       - mountPath: /gcs
         name: gcs-scratch
       - mountPath: /s3
@@ -191,6 +223,13 @@
     volumes:
     - emptyDir: {}
       name: kfp-launcher
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
     - emptyDir: {}
       name: gcs-scratch
     - emptyDir: {}
@@ -215,8 +254,8 @@
             value: '{{workflow.parameters.implementations-50751e65c3e6d87ebc6013c4afbd5fca5a399580ffb9de27fe21ca0af60f37ba}}'
           - name: task-name
             value: print-op1
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         name: print-op1-driver
         template: system-container-driver
      - arguments:
@@ -239,8 +278,8 @@ spec:
             value: '{{workflow.parameters.implementations-3f2ba2471e2638f8b27a01f4563316e6e3e8c02bae402875e95b5b86e8de1ddc}}'
           - name: task-name
             value: print-op2
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         depends: print-op1.Succeeded
         name: print-op2-driver
         template: system-container-driver
@@ -256,7 +295,7 @@
         template: system-container-executor
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
     metadata: {}
     name: comp-inner-pipeline
     outputs: {}
@@ -272,8 +311,8 @@
       - '{{workflow.name}}'
       - --run_display_name
       - ""
-      - --dag_execution_id
-      - '{{inputs.parameters.parent-dag-id}}'
+      - --parent_task_id
+      - '{{inputs.parameters.parent-dag-task-id}}'
       - --component
       - '{{inputs.parameters.component}}'
       - --task
@@ -284,8 +323,8 @@
       - '{{inputs.parameters.runtime-config}}'
       - --iteration_index
       - '{{inputs.parameters.iteration-index}}'
-      - --execution_id_path
-      - '{{outputs.parameters.execution-id.path}}'
+      - --parent_task_id_path
+      - '{{outputs.parameters.parent-dag-task-id-path.path}}'
       - --iteration_count_path
       - '{{outputs.parameters.iteration-count.path}}'
       - --condition_path
@@ -298,6 +337,19 @@
       - ""
       command:
       - driver
+      env:
+      - name: KFP_POD_NAME
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.name
+      - name: KFP_POD_UID
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       image: ghcr.io/kubeflow/kfp-driver:latest
       name: ""
       resources:
@@ -307,6 +359,10 @@
       requests:
         cpu: 100m
         memory: 64Mi
+      volumeMounts:
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
     inputs:
       parameters:
       - name: component
@@ -316,8 +372,8 @@
         name: task
       - default: ""
         name: task-name
-      - default: "0"
-        name: parent-dag-id
+      - default: ""
+        name: parent-dag-task-id
       - default: "-1"
         name: iteration-index
       - default: DAG
@@ -326,9 +382,9 @@
     name: system-dag-driver
     outputs:
       parameters:
-      - name: execution-id
+      - name: parent-dag-task-id-path
         valueFrom:
-          path: /tmp/outputs/execution-id
+          path: /tmp/outputs/task-id
       - name: iteration-count
         valueFrom:
           default: "0"
@@ -337,14 +393,22 @@
         valueFrom:
           default: "true"
           path: /tmp/outputs/condition
+    volumes:
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
   - dag:
       tasks:
       - arguments:
           parameters:
           - name: component
             value: '{{workflow.parameters.components-comp-inner-pipeline}}'
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
           - name: task
             value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-inner-pipeline"},"inputs":{"parameters":{"msg":{"runtimeValue":{"constant":"world"}}}},"taskInfo":{"name":"inner-pipeline"}}'
           - name: task-name
@@ -353,8 +417,8 @@
         template: system-dag-driver
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{tasks.inner-pipeline-driver.outputs.parameters.execution-id}}'
+          - name: parent-dag-task-id
+            value: '{{tasks.inner-pipeline-driver.outputs.parameters.parent-dag-task-id-path}}'
           - name: condition
             value: '{{tasks.inner-pipeline-driver.outputs.parameters.condition}}'
         depends: inner-pipeline-driver.Succeeded
@@ -370,8 +434,8 @@
             value: '{{workflow.parameters.implementations-50751e65c3e6d87ebc6013c4afbd5fca5a399580ffb9de27fe21ca0af60f37ba}}'
           - name: task-name
             value: print-op1
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         name: print-op1-driver
         template: system-container-driver
       - arguments:
@@ -386,7 +450,7 @@
         template: system-container-executor
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
     metadata: {}
     name: root
     outputs: {}
@@ -404,8 +468,8 @@
         template: system-dag-driver
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{tasks.root-driver.outputs.parameters.execution-id}}'
+          - name: parent-dag-task-id
+            value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}'
           - name: condition
             value: ""
         depends: root-driver.Succeeded
diff --git a/test_data/compiled-workflows/pipeline_with_parallelfor_parallelism.yaml b/test_data/compiled-workflows/pipeline_with_parallelfor_parallelism.yaml
index 8ac5f05fb46..3eddaf15927 100644
--- a/test_data/compiled-workflows/pipeline_with_parallelfor_parallelism.yaml
+++ b/test_data/compiled-workflows/pipeline_with_parallelfor_parallelism.yaml
@@ -139,8 +139,8 @@ spec:
       - '{{workflow.name}}'
       - --run_display_name
       - ""
-      - --dag_execution_id
-      - '{{inputs.parameters.parent-dag-id}}'
+      - --parent_task_id
+      - '{{inputs.parameters.parent-dag-task-id}}'
       - --component
       - '{{inputs.parameters.component}}'
       - --task
@@ -167,6 +167,19 @@ spec:
       - ""
       command:
       - driver
+      env:
+      - name: KFP_POD_NAME
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.name
+      - name: KFP_POD_UID
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       image: ghcr.io/kubeflow/kfp-driver:latest
       name: ""
       resources:
@@ -176,13 +189,17 @@
       requests:
         cpu: 100m
         memory: 64Mi
+      volumeMounts:
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
     inputs:
       parameters:
       - name: component
       - name: task
       - name: container
       - name: task-name
-      - name: parent-dag-id
+      - name: parent-dag-task-id
       - default: "-1"
         name: iteration-index
       - default: ""
@@ -204,6 +221,14 @@
         valueFrom:
           default: "true"
           path: /tmp/outputs/condition
+    volumes:
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
   - dag:
       tasks:
       - arguments:
@@ -233,6 +258,10 @@
         valueFrom:
          fieldRef:
            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       envFrom:
       - configMapRef:
           name: metadata-grpc-configmap
@@ -243,6 +272,9 @@
       volumeMounts:
       - mountPath: /kfp-launcher
         name: kfp-launcher
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
       - mountPath: /gcs
         name: gcs-scratch
       - mountPath: /s3
@@ -282,6 +314,13 @@
     volumes:
     - emptyDir: {}
      name: kfp-launcher
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
     - emptyDir: {}
       name: gcs-scratch
     - emptyDir: {}
@@ -306,8 +345,10 @@
             value: '{{workflow.parameters.implementations-a37ae37968ac30a4c2376035e115521a351c56fae22ef37e91f7df600a415f7d}}'
           - name: task-name
             value: print-text-2
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         name: print-text-2-driver
         template: system-container-driver
       - arguments:
@@ -317,12 +358,15 @@
           - default: "false"
             name: cached-decision
             value: '{{tasks.print-text-2-driver.outputs.parameters.cached-decision}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         depends: print-text-2-driver.Succeeded
         name: print-text-2
         template: system-container-executor
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
+      - name: iteration-index
     metadata: {}
     name: comp-for-loop-2
     outputs: {}
@@ -338,8 +382,8 @@
       - '{{workflow.name}}'
       - --run_display_name
       - ""
-      - --dag_execution_id
-      - '{{inputs.parameters.parent-dag-id}}'
+      - --parent_task_id
+      - '{{inputs.parameters.parent-dag-task-id}}'
       - --component
       - '{{inputs.parameters.component}}'
       - --task
@@ -350,8 +394,8 @@
       - '{{inputs.parameters.runtime-config}}'
       - --iteration_index
       - '{{inputs.parameters.iteration-index}}'
-      - --execution_id_path
-      - '{{outputs.parameters.execution-id.path}}'
+      - --parent_task_id_path
+      - '{{outputs.parameters.parent-dag-task-id-path.path}}'
       - --iteration_count_path
       - '{{outputs.parameters.iteration-count.path}}'
       - --condition_path
@@ -364,6 +408,19 @@
       - ""
       command:
       - driver
+      env:
+      - name: KFP_POD_NAME
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.name
+      - name: KFP_POD_UID
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       image: ghcr.io/kubeflow/kfp-driver:latest
       name: ""
       resources:
@@ -373,6 +430,10 @@
       requests:
         cpu: 100m
         memory: 64Mi
+      volumeMounts:
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
     inputs:
       parameters:
       - name: component
@@ -382,8 +443,8 @@
         name: task
       - default: ""
         name: task-name
-      - default: "0"
-        name: parent-dag-id
+      - default: ""
+        name: parent-dag-task-id
       - default: "-1"
         name: iteration-index
       - default: DAG
@@ -392,9 +453,9 @@
     name: system-dag-driver
     outputs:
       parameters:
-      - name: execution-id
+      - name: parent-dag-task-id-path
         valueFrom:
-          path: /tmp/outputs/execution-id
+          path: /tmp/outputs/task-id
       - name: iteration-count
         valueFrom:
           default: "0"
@@ -403,62 +464,42 @@
         valueFrom:
           default: "true"
           path: /tmp/outputs/condition
+    volumes:
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
   - dag:
       tasks:
       - arguments:
           parameters:
           - name: component
             value: '{{workflow.parameters.components-comp-for-loop-2}}'
-          - name: iteration-index
-            value: '{{inputs.parameters.iteration-index}}'
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
-          - name: task
-            value: '{"componentRef":{"name":"comp-for-loop-2"},"inputs":{"parameters":{"pipelinechannel--loop_parameter":{"componentInputParameter":"pipelinechannel--loop_parameter"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop_parameter-loop-item","items":{"inputParameter":"pipelinechannel--loop_parameter"}},"taskInfo":{"name":"for-loop-2"}}'
-        name: iteration-item-driver
-        template: system-dag-driver
-      - arguments:
-          parameters:
-          - name: parent-dag-id
-            value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}'
-          - name: condition
-            value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}'
-        depends: iteration-item-driver.Succeeded
-        name: iteration-item
-        template: comp-for-loop-2
-    inputs:
-      parameters:
-      - name: parent-dag-id
-      - name: iteration-index
-    metadata: {}
-    name: comp-for-loop-2-iteration
-    outputs: {}
-  - dag:
-      tasks:
-      - arguments:
-          parameters:
-          - name: component
-            value: '{{workflow.parameters.components-comp-for-loop-2}}'
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
           - name: task
             value: '{"componentRef":{"name":"comp-for-loop-2"},"inputs":{"parameters":{"pipelinechannel--loop_parameter":{"componentInputParameter":"pipelinechannel--loop_parameter"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop_parameter-loop-item","items":{"inputParameter":"pipelinechannel--loop_parameter"}},"taskInfo":{"name":"for-loop-2"}}'
+          - name: task-name
+            value: for-loop-2
         name: iteration-driver
         template: system-dag-driver
       - arguments:
          parameters:
-          - name: parent-dag-id
-            value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}'
+          - name: parent-dag-task-id
+            value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}'
           - name: iteration-index
             value: '{{item}}'
         depends: iteration-driver.Succeeded
         name: iteration-iterations
-        template: comp-for-loop-2-iteration
+        template: comp-for-loop-2
         withSequence:
           count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}'
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
     metadata: {}
     name: comp-for-loop-2-for-loop-2-iterator
     outputs: {}
@@ -466,8 +507,10 @@
       tasks:
      - arguments:
          parameters:
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         name: for-loop-2
         template: comp-for-loop-2-for-loop-2-iterator
       - arguments:
@@ -480,8 +523,10 @@
             value: '{{workflow.parameters.implementations-a37ae37968ac30a4c2376035e115521a351c56fae22ef37e91f7df600a415f7d}}'
           - name: task-name
             value: print-text
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         name: print-text-driver
         template: system-container-driver
       - arguments:
@@ -491,12 +536,15 @@
           - default: "false"
             name: cached-decision
             value: '{{tasks.print-text-driver.outputs.parameters.cached-decision}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         depends: print-text-driver.Succeeded
         name: print-text
         template: system-container-executor
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
+      - name: iteration-index
     metadata: {}
     name: comp-for-loop-1
     outputs: {}
@@ -512,8 +560,10 @@
             value: '{{workflow.parameters.implementations-7898b23e6d22eaa693de615bf8e0d54ba7a75b5eacd5bc1ba5fa2a79afa961f8}}'
           - name: task-name
             value: print-int-3
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         name: print-int-3-driver
         template: system-container-driver
       - arguments:
@@ -523,12 +573,15 @@
           - default: "false"
             name: cached-decision
             value: '{{tasks.print-int-3-driver.outputs.parameters.cached-decision}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         depends: print-int-3-driver.Succeeded
         name: print-int-3
         template: system-container-executor
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
+      - name: iteration-index
     metadata: {}
     name: comp-for-loop-10
     outputs: {}
@@ -544,8 +597,10 @@
             value: '{{workflow.parameters.implementations-7898b23e6d22eaa693de615bf8e0d54ba7a75b5eacd5bc1ba5fa2a79afa961f8}}'
           - name: task-name
             value: print-int-4
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         name: print-int-4-driver
         template: system-container-driver
       - arguments:
@@ -555,12 +610,15 @@
           - default: "false"
             name: cached-decision
             value: '{{tasks.print-int-4-driver.outputs.parameters.cached-decision}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         depends: print-int-4-driver.Succeeded
         name: print-int-4
         template: system-container-executor
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
+      - name: iteration-index
     metadata: {}
     name: comp-for-loop-11
     outputs: {}
@@ -576,8 +634,10 @@
             value: '{{workflow.parameters.implementations-7898b23e6d22eaa693de615bf8e0d54ba7a75b5eacd5bc1ba5fa2a79afa961f8}}'
           - name: task-name
             value: print-int-5
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         name: print-int-5-driver
         template: system-container-driver
       - arguments:
@@ -587,12 +647,15 @@
           - default: "false"
             name: cached-decision
             value: '{{tasks.print-int-5-driver.outputs.parameters.cached-decision}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         depends: print-int-5-driver.Succeeded
         name: print-int-5
         template: system-container-executor
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
+      - name: iteration-index
     metadata: {}
     name: comp-for-loop-12
     outputs: {}
@@ -608,8 +671,10 @@
             value: '{{workflow.parameters.implementations-7898b23e6d22eaa693de615bf8e0d54ba7a75b5eacd5bc1ba5fa2a79afa961f8}}'
           - name: task-name
             value: print-int-6
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         name: print-int-6-driver
         template: system-container-driver
       - arguments:
@@ -619,12 +684,15 @@
           - default: "false"
             name: cached-decision
             value: '{{tasks.print-int-6-driver.outputs.parameters.cached-decision}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         depends: print-int-6-driver.Succeeded
         name: print-int-6
         template: system-container-executor
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
+      - name: iteration-index
     metadata: {}
     name: comp-for-loop-13
     outputs: {}
@@ -640,8 +708,10 @@
             value: '{{workflow.parameters.implementations-a37ae37968ac30a4c2376035e115521a351c56fae22ef37e91f7df600a415f7d}}'
           - name: task-name
             value: print-text-5
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         name: print-text-5-driver
         template: system-container-driver
       - arguments:
@@ -651,6 +721,8 @@
           - default: "false"
             name: cached-decision
             value: '{{tasks.print-text-5-driver.outputs.parameters.cached-decision}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         depends: print-text-5-driver.Succeeded
         name: print-text-5
         template: system-container-executor
@@ -664,8 +736,10 @@ spec:
             value: '{{workflow.parameters.implementations-a37ae37968ac30a4c2376035e115521a351c56fae22ef37e91f7df600a415f7d}}'
           - name: task-name
             value: print-text-6
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         name: print-text-6-driver
         template: system-container-driver
       - arguments:
@@ -675,45 +749,17 @@ spec:
           - default: "false"
             name: cached-decision
             value: '{{tasks.print-text-6-driver.outputs.parameters.cached-decision}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         depends: print-text-6-driver.Succeeded
         name: print-text-6
         template: system-container-executor
     inputs:
       parameters:
-      - name: parent-dag-id
-    metadata: {}
-    name: comp-for-loop-6
-    outputs: {}
-  - dag:
-      tasks:
-      - arguments:
-          parameters:
-          - name: component
-            value: '{{workflow.parameters.components-comp-for-loop-6}}'
-          - name: iteration-index
-            value: '{{inputs.parameters.iteration-index}}'
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
-          - name: task
-            value: '{"componentRef":{"name":"comp-for-loop-6"},"iteratorPolicy":{"parallelismLimit":1},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-5","items":{"raw":"[{\"A_a\":
-              \"10\", \"B_b\": \"20\"}, {\"A_a\": \"100\", \"B_b\": \"200\"}]"}},"taskInfo":{"name":"for-loop-6"}}'
-        name: iteration-item-driver
-        template: system-dag-driver
-      - arguments:
-          parameters:
-          - name: parent-dag-id
-            value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}'
-          - name: condition
-            value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}'
-        depends: iteration-item-driver.Succeeded
-        name: iteration-item
-        template: comp-for-loop-6
-    inputs:
-      parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
       - name: iteration-index
     metadata: {}
-    name: comp-for-loop-6-iteration
+    name: comp-for-loop-6
     outputs: {}
   - dag:
       tasks:
@@ -721,27 +767,29 @@ spec:
           parameters:
           - name: component
             value: '{{workflow.parameters.components-comp-for-loop-6}}'
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
           - name: task
             value: '{"componentRef":{"name":"comp-for-loop-6"},"iteratorPolicy":{"parallelismLimit":1},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-5","items":{"raw":"[{\"A_a\":
               \"10\", \"B_b\": \"20\"}, {\"A_a\": \"100\", \"B_b\": \"200\"}]"}},"taskInfo":{"name":"for-loop-6"}}'
+          - name: task-name
+            value: for-loop-6
         name: iteration-driver
         template: system-dag-driver
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}'
+          - name: parent-dag-task-id
+            value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}'
           - name: iteration-index
             value: '{{item}}'
         depends: iteration-driver.Succeeded
         name: iteration-iterations
-        template: comp-for-loop-6-iteration
+        template: comp-for-loop-6
         withSequence:
           count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}'
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
     metadata: {}
     name: comp-for-loop-6-for-loop-6-iterator
     outputs: {}
@@ -750,8 +798,10 @@ spec:
       tasks:
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         name: for-loop-6
         template: comp-for-loop-6-for-loop-6-iterator
       - arguments:
@@ -764,8 +814,10 @@ spec:
             value: '{{workflow.parameters.implementations-a37ae37968ac30a4c2376035e115521a351c56fae22ef37e91f7df600a415f7d}}'
           - name: task-name
             value: print-text-3
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         name: print-text-3-driver
         template: system-container-driver
       - arguments:
@@ -775,6 +827,8 @@ spec:
           - default: "false"
             name: cached-decision
             value: '{{tasks.print-text-3-driver.outputs.parameters.cached-decision}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         depends: print-text-3-driver.Succeeded
         name: print-text-3
         template: system-container-executor
@@ -788,8 +842,10 @@ spec:
             value: '{{workflow.parameters.implementations-a37ae37968ac30a4c2376035e115521a351c56fae22ef37e91f7df600a415f7d}}'
           - name: task-name
             value: print-text-4
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         name: print-text-4-driver
         template: system-container-driver
       - arguments:
@@ -799,12 +855,15 @@ spec:
           - default: "false"
             name: cached-decision
             value: '{{tasks.print-text-4-driver.outputs.parameters.cached-decision}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         depends: print-text-4-driver.Succeeded
         name: print-text-4
         template: system-container-executor
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
+      - name: iteration-index
     metadata: {}
     name: comp-for-loop-4
     outputs: {}
@@ -820,8 +879,10 @@ spec:
             value: '{{workflow.parameters.implementations-7898b23e6d22eaa693de615bf8e0d54ba7a75b5eacd5bc1ba5fa2a79afa961f8}}'
           - name: task-name
             value: print-int
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         name: print-int-driver
         template: system-container-driver
       - arguments:
@@ -831,12 +892,15 @@ spec:
           - default: "false"
             name: cached-decision
             value: '{{tasks.print-int-driver.outputs.parameters.cached-decision}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         depends: print-int-driver.Succeeded
         name: print-int
         template: system-container-executor
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
+      - name: iteration-index
     metadata: {}
     name: comp-for-loop-8
     outputs: {}
@@ -852,8 +916,10 @@ spec:
             value: '{{workflow.parameters.implementations-7898b23e6d22eaa693de615bf8e0d54ba7a75b5eacd5bc1ba5fa2a79afa961f8}}'
           - name: task-name
             value: print-int-2
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         name: print-int-2-driver
         template: system-container-driver
       - arguments:
@@ -863,44 +929,17 @@ spec:
           - default: "false"
             name: cached-decision
             value: '{{tasks.print-int-2-driver.outputs.parameters.cached-decision}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         depends: print-int-2-driver.Succeeded
         name: print-int-2
         template: system-container-executor
     inputs:
       parameters:
-      - name: parent-dag-id
-    metadata: {}
-    name: comp-for-loop-9
-    outputs: {}
-  - dag:
-      tasks:
-      - arguments:
-          parameters:
-          - name: component
-            value: '{{workflow.parameters.components-comp-for-loop-1}}'
-          - name: iteration-index
-            value: '{{inputs.parameters.iteration-index}}'
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
-          - name: task
-            value: '{"componentRef":{"name":"comp-for-loop-1"},"inputs":{"parameters":{"pipelinechannel--loop_parameter":{"componentInputParameter":"loop_parameter"}}},"iteratorPolicy":{"parallelismLimit":2},"parameterIterator":{"itemInput":"pipelinechannel--loop_parameter-loop-item","items":{"inputParameter":"pipelinechannel--loop_parameter"}},"taskInfo":{"name":"for-loop-1"}}'
-        name: iteration-item-driver
-        template: system-dag-driver
-      - arguments:
-          parameters:
-          - name: parent-dag-id
-            value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}'
-          - name: condition
-            value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}'
-        depends: iteration-item-driver.Succeeded
-        name: iteration-item
-        template: comp-for-loop-1
-    inputs:
-      parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
       - name: iteration-index
     metadata: {}
-    name: comp-for-loop-1-iteration
+    name: comp-for-loop-9
    outputs: {}
   - dag:
       tasks:
@@ -908,26 +947,28 @@ spec:
           parameters:
           - name: component
             value: '{{workflow.parameters.components-comp-for-loop-1}}'
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
           - name: task
             value: '{"componentRef":{"name":"comp-for-loop-1"},"inputs":{"parameters":{"pipelinechannel--loop_parameter":{"componentInputParameter":"loop_parameter"}}},"iteratorPolicy":{"parallelismLimit":2},"parameterIterator":{"itemInput":"pipelinechannel--loop_parameter-loop-item","items":{"inputParameter":"pipelinechannel--loop_parameter"}},"taskInfo":{"name":"for-loop-1"}}'
+          - name: task-name
+            value: for-loop-1
         name: iteration-driver
         template: system-dag-driver
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}'
+          - name: parent-dag-task-id
+            value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}'
           - name: iteration-index
             value: '{{item}}'
         depends: iteration-driver.Succeeded
         name: iteration-iterations
-        template: comp-for-loop-1-iteration
+        template: comp-for-loop-1
         withSequence:
           count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}'
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
     metadata: {}
     name: comp-for-loop-1-for-loop-1-iterator
     outputs: {}
@@ -938,56 +979,28 @@ spec:
           parameters:
           - name: component
             value: '{{workflow.parameters.components-comp-for-loop-10}}'
-          - name: iteration-index
-            value: '{{inputs.parameters.iteration-index}}'
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
-          - name: task
-            value: '{"componentRef":{"name":"comp-for-loop-10"},"dependentTasks":["list-dict-maker-1"],"inputs":{"parameters":{"pipelinechannel--list-dict-maker-1-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"list-dict-maker-1"}}}},"parameterIterator":{"itemInput":"pipelinechannel--list-dict-maker-1-Output-loop-item","items":{"inputParameter":"pipelinechannel--list-dict-maker-1-Output"}},"taskInfo":{"name":"for-loop-10"}}'
-        name: iteration-item-driver
-        template: system-dag-driver
-      - arguments:
-          parameters:
-          - name: parent-dag-id
-            value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}'
-          - name: condition
-            value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}'
-        depends: iteration-item-driver.Succeeded
-        name: iteration-item
-        template: comp-for-loop-10
-    inputs:
-      parameters:
-      - name: parent-dag-id
-      - name: iteration-index
-    metadata: {}
-    name: comp-for-loop-10-iteration
-    outputs: {}
-  - dag:
-      tasks:
-      - arguments:
-          parameters:
-          - name: component
-            value: '{{workflow.parameters.components-comp-for-loop-10}}'
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
           - name: task
             value: '{"componentRef":{"name":"comp-for-loop-10"},"dependentTasks":["list-dict-maker-1"],"inputs":{"parameters":{"pipelinechannel--list-dict-maker-1-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"list-dict-maker-1"}}}},"parameterIterator":{"itemInput":"pipelinechannel--list-dict-maker-1-Output-loop-item","items":{"inputParameter":"pipelinechannel--list-dict-maker-1-Output"}},"taskInfo":{"name":"for-loop-10"}}'
+          - name: task-name
+            value: for-loop-10
         name: iteration-driver
         template: system-dag-driver
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}'
+          - name: parent-dag-task-id
+            value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}'
           - name: iteration-index
             value: '{{item}}'
         depends: iteration-driver.Succeeded
         name: iteration-iterations
-        template: comp-for-loop-10-iteration
+        template: comp-for-loop-10
         withSequence:
           count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}'
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
     metadata: {}
     name: comp-for-loop-10-for-loop-10-iterator
     outputs: {}
@@ -997,56 +1010,28 @@ spec:
           parameters:
           - name: component
             value: '{{workflow.parameters.components-comp-for-loop-11}}'
-          - name: iteration-index
-            value: '{{inputs.parameters.iteration-index}}'
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
-          - name: task
-            value: '{"componentRef":{"name":"comp-for-loop-11"},"dependentTasks":["list-dict-maker-2"],"inputs":{"parameters":{"pipelinechannel--list-dict-maker-2-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"list-dict-maker-2"}}}},"parameterIterator":{"itemInput":"pipelinechannel--list-dict-maker-2-Output-loop-item","items":{"inputParameter":"pipelinechannel--list-dict-maker-2-Output"}},"taskInfo":{"name":"for-loop-11"}}'
-        name: iteration-item-driver
-        template: system-dag-driver
-      - arguments:
-          parameters:
-          - name: parent-dag-id
-            value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}'
-          - name: condition
-            value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}'
-        depends: iteration-item-driver.Succeeded
-        name: iteration-item
-        template: comp-for-loop-11
-    inputs:
-      parameters:
-      - name: parent-dag-id
-      - name: iteration-index
-    metadata: {}
-    name: comp-for-loop-11-iteration
-    outputs: {}
-  - dag:
-      tasks:
-      - arguments:
-          parameters:
-          - name: component
-            value: '{{workflow.parameters.components-comp-for-loop-11}}'
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
           - name: task
'{"componentRef":{"name":"comp-for-loop-11"},"dependentTasks":["list-dict-maker-2"],"inputs":{"parameters":{"pipelinechannel--list-dict-maker-2-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"list-dict-maker-2"}}}},"parameterIterator":{"itemInput":"pipelinechannel--list-dict-maker-2-Output-loop-item","items":{"inputParameter":"pipelinechannel--list-dict-maker-2-Output"}},"taskInfo":{"name":"for-loop-11"}}' + - name: task-name + value: for-loop-11 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-11-iteration + template: comp-for-loop-11 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-11-for-loop-11-iterator outputs: {} @@ -1056,56 +1041,28 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-12}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-12"},"dependentTasks":["list-dict-maker-3"],"inputs":{"parameters":{"pipelinechannel--list-dict-maker-3-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"list-dict-maker-3"}}}},"parameterIterator":{"itemInput":"pipelinechannel--list-dict-maker-3-Output-loop-item","items":{"inputParameter":"pipelinechannel--list-dict-maker-3-Output"}},"taskInfo":{"name":"for-loop-12"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-12 - inputs: - parameters: - - name: parent-dag-id - - name: iteration-index - metadata: {} - name: comp-for-loop-12-iteration - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-12}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-for-loop-12"},"dependentTasks":["list-dict-maker-3"],"inputs":{"parameters":{"pipelinechannel--list-dict-maker-3-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"list-dict-maker-3"}}}},"parameterIterator":{"itemInput":"pipelinechannel--list-dict-maker-3-Output-loop-item","items":{"inputParameter":"pipelinechannel--list-dict-maker-3-Output"}},"taskInfo":{"name":"for-loop-12"}}' + - name: task-name + value: for-loop-12 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: 
iteration-iterations - template: comp-for-loop-12-iteration + template: comp-for-loop-12 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-12-for-loop-12-iterator outputs: {} @@ -1115,56 +1072,28 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-13}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-13"},"dependentTasks":["list-dict-maker-1-2"],"inputs":{"parameters":{"pipelinechannel--list-dict-maker-1-2-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"list-dict-maker-1-2"}}}},"parameterIterator":{"itemInput":"pipelinechannel--list-dict-maker-1-2-Output-loop-item","items":{"inputParameter":"pipelinechannel--list-dict-maker-1-2-Output"}},"taskInfo":{"name":"for-loop-13"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-13 - inputs: - parameters: - - name: parent-dag-id - - name: iteration-index - metadata: {} - name: comp-for-loop-13-iteration - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-13}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-for-loop-13"},"dependentTasks":["list-dict-maker-1-2"],"inputs":{"parameters":{"pipelinechannel--list-dict-maker-1-2-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"list-dict-maker-1-2"}}}},"parameterIterator":{"itemInput":"pipelinechannel--list-dict-maker-1-2-Output-loop-item","items":{"inputParameter":"pipelinechannel--list-dict-maker-1-2-Output"}},"taskInfo":{"name":"for-loop-13"}}' + - name: task-name + value: for-loop-13 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-13-iteration + template: comp-for-loop-13 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-13-for-loop-13-iterator outputs: {} @@ -1174,58 +1103,29 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-4}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-4"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[{\"A_a\": - \"1\", \"B_b\": \"2\"}, {\"A_a\": \"10\", \"B_b\": 
\"20\"}]"}},"taskInfo":{"name":"for-loop-4"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-4 - inputs: - parameters: - - name: parent-dag-id - - name: iteration-index - metadata: {} - name: comp-for-loop-4-iteration - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-4}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-for-loop-4"},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-3","items":{"raw":"[{\"A_a\": \"1\", \"B_b\": \"2\"}, {\"A_a\": \"10\", \"B_b\": \"20\"}]"}},"taskInfo":{"name":"for-loop-4"}}' + - name: task-name + value: for-loop-4 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-4-iteration + template: comp-for-loop-4 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-4-for-loop-4-iterator outputs: {} @@ -1235,58 +1135,29 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-8}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-8"},"iteratorPolicy":{"parallelismLimit":1},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-7","items":{"raw":"[{\"a\": - 1, \"b\": 2}, {\"a\": 2, \"b\": 3}, {\"a\": 3, \"b\": 4}]"}},"taskInfo":{"name":"for-loop-8"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-8 - inputs: - parameters: - - name: parent-dag-id - - name: iteration-index - metadata: {} - name: comp-for-loop-8-iteration - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-8}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-for-loop-8"},"iteratorPolicy":{"parallelismLimit":1},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-7","items":{"raw":"[{\"a\": 1, \"b\": 2}, {\"a\": 2, \"b\": 3}, {\"a\": 3, \"b\": 4}]"}},"taskInfo":{"name":"for-loop-8"}}' + - name: task-name + value: for-loop-8 name: iteration-driver template: 
system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-8-iteration + template: comp-for-loop-8 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-8-for-loop-8-iterator outputs: {} @@ -1297,56 +1168,28 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-9}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-9"},"dependentTasks":["list-dict-maker-0"],"inputs":{"parameters":{"pipelinechannel--list-dict-maker-0-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"list-dict-maker-0"}}}},"parameterIterator":{"itemInput":"pipelinechannel--list-dict-maker-0-Output-loop-item","items":{"inputParameter":"pipelinechannel--list-dict-maker-0-Output"}},"taskInfo":{"name":"for-loop-9"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-9 - inputs: - parameters: - - name: parent-dag-id - - name: iteration-index - metadata: {} - name: comp-for-loop-9-iteration - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-9}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-for-loop-9"},"dependentTasks":["list-dict-maker-0"],"inputs":{"parameters":{"pipelinechannel--list-dict-maker-0-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"list-dict-maker-0"}}}},"parameterIterator":{"itemInput":"pipelinechannel--list-dict-maker-0-Output-loop-item","items":{"inputParameter":"pipelinechannel--list-dict-maker-0-Output"}},"taskInfo":{"name":"for-loop-9"}}' + - name: task-name + value: for-loop-9 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-9-iteration + template: comp-for-loop-9 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-9-for-loop-9-iterator outputs: {} @@ -1354,54 +1197,54 @@ spec: tasks: - arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' 
         name: for-loop-1
         template: comp-for-loop-1-for-loop-1-iterator
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         depends: list-dict-maker-1.Succeeded
         name: for-loop-10
         template: comp-for-loop-10-for-loop-10-iterator
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         depends: list-dict-maker-2.Succeeded
         name: for-loop-11
         template: comp-for-loop-11-for-loop-11-iterator
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         depends: list-dict-maker-3.Succeeded
         name: for-loop-12
         template: comp-for-loop-12-for-loop-12-iterator
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         depends: list-dict-maker-1-2.Succeeded
         name: for-loop-13
         template: comp-for-loop-13-for-loop-13-iterator
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         name: for-loop-4
         template: comp-for-loop-4-for-loop-4-iterator
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         name: for-loop-8
         template: comp-for-loop-8-for-loop-8-iterator
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         depends: list-dict-maker-0.Succeeded
         name: for-loop-9
         template: comp-for-loop-9-for-loop-9-iterator
@@ -1415,8 +1258,8 @@ spec:
             value: '{{workflow.parameters.implementations-06195db0e7bb31c20391b65c5986a0f0166ba2c57e589e9f8ec5ff0c8ab2bf6b}}'
           - name: task-name
             value: list-dict-maker-0
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         name: list-dict-maker-0-driver
         template: system-container-driver
       - arguments:
@@ -1439,8 +1282,8 @@ spec:
             value: '{{workflow.parameters.implementations-01c5f47f9250430c22049f8c5a56a816a7b031058d432946165fb45bb014e6c4}}'
           - name: task-name
             value: list-dict-maker-1
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         name: list-dict-maker-1-driver
         template: system-container-driver
       - arguments:
@@ -1463,8 +1306,8 @@ spec:
             value: '{{workflow.parameters.implementations-01c5f47f9250430c22049f8c5a56a816a7b031058d432946165fb45bb014e6c4}}'
           - name: task-name
             value: list-dict-maker-1-2
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         name: list-dict-maker-1-2-driver
         template: system-container-driver
       - arguments:
@@ -1487,8 +1330,8 @@ spec:
             value: '{{workflow.parameters.implementations-cccaadf427920aa88170927a2a48949cff1715df09e55aedcdc2a5713766b51c}}'
           - name: task-name
             value: list-dict-maker-2
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         name: list-dict-maker-2-driver
         template: system-container-driver
       - arguments:
@@ -1511,8 +1354,8 @@ spec:
             value: '{{workflow.parameters.implementations-7fcc431058b62e0fd2bab27c674e9083faadc5d7b7d9a6fccd78345c0fd58286}}'
           - name: task-name
             value: list-dict-maker-3
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         name: list-dict-maker-3-driver
         template: system-container-driver
       - arguments:
@@ -1527,7 +1370,7 @@ spec:
         template: system-container-executor
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
     metadata: {}
     name: root
     outputs: {}
@@ -1545,8 +1388,8 @@ spec:
         template: system-dag-driver
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{tasks.root-driver.outputs.parameters.execution-id}}'
+          - name: parent-dag-task-id
+            value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}'
           - name: condition
             value: ""
         depends: root-driver.Succeeded
diff --git a/test_data/compiled-workflows/pipeline_with_params_containing_format.yaml b/test_data/compiled-workflows/pipeline_with_params_containing_format.yaml
index 1c3edcf3e50..0959860691b 100644
--- a/test_data/compiled-workflows/pipeline_with_params_containing_format.yaml
+++ b/test_data/compiled-workflows/pipeline_with_params_containing_format.yaml
@@ -61,8 +61,8 @@ spec:
       - '{{workflow.name}}'
       - --run_display_name
       - ""
-      - --dag_execution_id
-      - '{{inputs.parameters.parent-dag-id}}'
+      - --parent_task_id
+      - '{{inputs.parameters.parent-dag-task-id}}'
       - --component
       - '{{inputs.parameters.component}}'
       - --task
@@ -89,6 +89,19 @@ spec:
       - ""
       command:
       - driver
+      env:
+      - name: KFP_POD_NAME
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.name
+      - name: KFP_POD_UID
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       image: ghcr.io/kubeflow/kfp-driver:latest
       name: ""
       resources:
@@ -98,13 +111,17 @@ spec:
         requests:
           cpu: 100m
           memory: 64Mi
+      volumeMounts:
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
     inputs:
       parameters:
       - name: component
       - name: task
       - name: container
       - name: task-name
-      - name: parent-dag-id
+      - name: parent-dag-task-id
       - default: "-1"
         name: iteration-index
       - default: ""
@@ -126,6 +143,14 @@ spec:
         valueFrom:
           default: "true"
           path: /tmp/outputs/condition
+    volumes:
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
   - dag:
       tasks:
       - arguments:
@@ -155,6 +180,10 @@ spec:
         valueFrom:
           fieldRef:
             fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       envFrom:
       - configMapRef:
           name: metadata-grpc-configmap
@@ -165,6 +194,9 @@ spec:
       volumeMounts:
       - mountPath: /kfp-launcher
         name: kfp-launcher
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
       - mountPath: /gcs
         name: gcs-scratch
       - mountPath: /s3
@@ -204,6 +236,13 @@ spec:
     volumes:
     - emptyDir: {}
       name: kfp-launcher
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
    - emptyDir: {}
       name: gcs-scratch
     - emptyDir: {}
@@ -229,8 +268,10 @@ spec:
             value: '{{workflow.parameters.implementations-eceb448d63a8e48362b7a15bc3e511d1455ffdbb43c514e0a03d7c39df3c06be}}'
           - name: task-name
             value: print-op2
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         name: print-op2-driver
         template: system-container-driver
       - arguments:
@@ -240,12 +281,15 @@ spec:
           - default: "false"
             name: cached-decision
             value: '{{tasks.print-op2-driver.outputs.parameters.cached-decision}}'
+          - name: iteration-index
+            value: '{{inputs.parameters.iteration-index}}'
         depends: print-op2-driver.Succeeded
         name: print-op2
         template: system-container-executor
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
+      - name: iteration-index
     metadata: {}
     name: comp-for-loop-2
     outputs: {}
@@ -261,8 +305,8 @@ spec:
       - '{{workflow.name}}'
       - --run_display_name
       - ""
-      - --dag_execution_id
-      - '{{inputs.parameters.parent-dag-id}}'
+      - --parent_task_id
+      - '{{inputs.parameters.parent-dag-task-id}}'
       - --component
       - '{{inputs.parameters.component}}'
       - --task
@@ -273,8 +317,8 @@ spec:
       - '{{inputs.parameters.runtime-config}}'
       - --iteration_index
       - '{{inputs.parameters.iteration-index}}'
-      - --execution_id_path
-      - '{{outputs.parameters.execution-id.path}}'
+      - --parent_task_id_path
+      - '{{outputs.parameters.parent-dag-task-id-path.path}}'
       - --iteration_count_path
       - '{{outputs.parameters.iteration-count.path}}'
       - --condition_path
@@ -287,6 +331,19 @@ spec:
       - ""
       command:
       - driver
+      env:
+      - name: KFP_POD_NAME
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.name
+      - name: KFP_POD_UID
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       image: ghcr.io/kubeflow/kfp-driver:latest
       name: ""
       resources:
@@ -296,6 +353,10 @@ spec:
         requests:
           cpu: 100m
           memory: 64Mi
+      volumeMounts:
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
     inputs:
       parameters:
       - name: component
@@ -305,8 +366,8 @@ spec:
         name: task
       - default: ""
         name: task-name
-      - default: "0"
-        name: parent-dag-id
+      - default: ""
+        name: parent-dag-task-id
       - default: "-1"
         name: iteration-index
       - default: DAG
@@ -315,9 +376,9 @@ spec:
     name: system-dag-driver
     outputs:
       parameters:
-      - name: execution-id
+      - name: parent-dag-task-id-path
         valueFrom:
-          path: /tmp/outputs/execution-id
+          path: /tmp/outputs/task-id
       - name: iteration-count
         valueFrom:
           default: "0"
@@ -326,64 +387,43 @@ spec:
         valueFrom:
           default: "true"
           path: /tmp/outputs/condition
+    volumes:
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
   - dag:
       tasks:
       - arguments:
           parameters:
           - name: component
             value: '{{workflow.parameters.components-comp-for-loop-2}}'
-          - name: iteration-index
-            value: '{{inputs.parameters.iteration-index}}'
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
-          - name: task
-            value: '{"componentRef":{"name":"comp-for-loop-2"},"inputs":{"parameters":{"pipelinechannel--name":{"componentInputParameter":"name"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[\"1\",
-              \"2\"]"}},"taskInfo":{"name":"for-loop-2"}}'
-        name: iteration-item-driver
-        template: system-dag-driver
-      - arguments:
-          parameters:
-          - name: parent-dag-id
-            value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}'
-          - name: condition
-            value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}'
-        depends: iteration-item-driver.Succeeded
-        name: iteration-item
-        template: comp-for-loop-2
-    inputs:
-      parameters:
-      - name: parent-dag-id
-      - name: iteration-index
-    metadata: {}
-    name: comp-for-loop-2-iteration
-    outputs: {}
-  - dag:
-      tasks:
-      - arguments:
-          parameters:
-          - name: component
-            value: '{{workflow.parameters.components-comp-for-loop-2}}'
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
           - name: task
             value: '{"componentRef":{"name":"comp-for-loop-2"},"inputs":{"parameters":{"pipelinechannel--name":{"componentInputParameter":"name"}}},"parameterIterator":{"itemInput":"pipelinechannel--loop-item-param-1","items":{"raw":"[\"1\",
               \"2\"]"}},"taskInfo":{"name":"for-loop-2"}}'
+          - name: task-name
+            value: for-loop-2
         name: iteration-driver
         template: system-dag-driver
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}'
+          - name: parent-dag-task-id
+            value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}'
           - name: iteration-index
             value: '{{item}}'
         depends: iteration-driver.Succeeded
         name: iteration-iterations
-        template: comp-for-loop-2-iteration
+        template: comp-for-loop-2
         withSequence:
           count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}'
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
     metadata: {}
     name: comp-for-loop-2-for-loop-2-iterator
     outputs: {}
@@ -391,8 +431,8 @@ spec:
       tasks:
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         name: for-loop-2
         template: comp-for-loop-2-for-loop-2-iterator
       - arguments:
@@ -406,8 +446,8 @@ spec:
             value: '{{workflow.parameters.implementations-321fe99287ca488cce9f936c3f6cfc0f77e2412bb1dbc00af4495c6701e41a88}}'
           - name: task-name
             value: print-op
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         name: print-op-driver
         template: system-container-driver
       - arguments:
@@ -431,8 +471,8 @@ spec:
             value: '{{workflow.parameters.implementations-321fe99287ca488cce9f936c3f6cfc0f77e2412bb1dbc00af4495c6701e41a88}}'
           - name: task-name
             value: print-op-2
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         depends: print-op.Succeeded
         name: print-op-2-driver
         template: system-container-driver
@@ -448,7 +488,7 @@ spec:
         template: system-container-executor
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
     metadata: {}
     name: root
     outputs: {}
@@ -466,8 +506,8 @@ spec:
         template: system-dag-driver
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{tasks.root-driver.outputs.parameters.execution-id}}'
+          - name: parent-dag-task-id
+            value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}'
           - name: condition
             value: ""
         depends: root-driver.Succeeded
diff --git a/test_data/compiled-workflows/pipeline_with_placeholders.yaml b/test_data/compiled-workflows/pipeline_with_placeholders.yaml
index e5f87de8121..7597a5fb778 100644
--- a/test_data/compiled-workflows/pipeline_with_placeholders.yaml
+++ b/test_data/compiled-workflows/pipeline_with_placeholders.yaml
@@ -48,8 +48,8 @@ spec:
       - '{{workflow.name}}'
       - --run_display_name
       - ""
-      - --dag_execution_id
-      - '{{inputs.parameters.parent-dag-id}}'
+      - --parent_task_id
+      - '{{inputs.parameters.parent-dag-task-id}}'
       - --component
       - '{{inputs.parameters.component}}'
       - --task
@@ -76,6 +89,19 @@ spec:
       - ""
       command:
       - driver
+      env:
+      - name: KFP_POD_NAME
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.name
+      - name: KFP_POD_UID
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       image: ghcr.io/kubeflow/kfp-driver:latest
       name: ""
       resources:
@@ -85,13 +98,17 @@ spec:
         requests:
           cpu: 100m
           memory: 64Mi
+      volumeMounts:
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
     inputs:
       parameters:
       - name: component
       - name: task
       - name: container
       - name: task-name
-      - name: parent-dag-id
+      - name: parent-dag-task-id
       - default: "-1"
         name: iteration-index
       - default: ""
@@ -113,6 +130,14 @@ spec:
         valueFrom:
           default: "true"
           path: /tmp/outputs/condition
+    volumes:
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
   - dag:
       tasks:
       - arguments:
@@ -142,6 +167,10 @@ spec:
         valueFrom:
          fieldRef:
             fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       envFrom:
       - configMapRef:
           name: metadata-grpc-configmap
@@ -152,6 +181,9 @@ spec:
       volumeMounts:
       - mountPath: /kfp-launcher
         name: kfp-launcher
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
       - mountPath: /gcs
         name: gcs-scratch
       - mountPath: /s3
@@ -191,6 +223,13 @@ spec:
     volumes:
     - emptyDir: {}
       name: kfp-launcher
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
     - emptyDir: {}
       name: gcs-scratch
     - emptyDir: {}
@@ -215,8 +254,8 @@ spec:
             value: '{{workflow.parameters.implementations-252e4a170354e881b8fd2729c1e3ee58684e5eee1443c6782901dd4330265f5e}}'
           - name: task-name
             value: print-all-placeholders
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         name: print-all-placeholders-driver
         template: system-container-driver
       - arguments:
@@ -231,7 +270,7 @@ spec:
         template: system-container-executor
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
     metadata: {}
     name: root
     outputs: {}
@@ -247,8 +286,8 @@ spec:
       - '{{workflow.name}}'
       - --run_display_name
       - ""
-      - --dag_execution_id
-      - '{{inputs.parameters.parent-dag-id}}'
+      - --parent_task_id
+      - '{{inputs.parameters.parent-dag-task-id}}'
       - --component
       - '{{inputs.parameters.component}}'
       - --task
@@ -259,8 +298,8 @@ spec:
       - '{{inputs.parameters.runtime-config}}'
       - --iteration_index
       - '{{inputs.parameters.iteration-index}}'
-      - --execution_id_path
-      - '{{outputs.parameters.execution-id.path}}'
+      - --parent_task_id_path
+      - '{{outputs.parameters.parent-dag-task-id-path.path}}'
       - --iteration_count_path
       - '{{outputs.parameters.iteration-count.path}}'
       - --condition_path
@@ -273,6 +312,19 @@ spec:
       - ""
       command:
       - driver
+      env:
+      - name: KFP_POD_NAME
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.name
+      - name: KFP_POD_UID
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       image: ghcr.io/kubeflow/kfp-driver:latest
       name: ""
       resources:
@@ -282,6 +334,10 @@ spec:
         requests:
           cpu: 100m
           memory: 64Mi
+      volumeMounts:
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
     inputs:
       parameters:
       - name: component
@@ -291,8 +347,8 @@ spec:
         name: task
       - default: ""
         name: task-name
-      - default: "0"
-        name: parent-dag-id
+      - default: ""
+        name: parent-dag-task-id
       - default: "-1"
         name: iteration-index
       - default: DAG
@@ -301,9 +357,9 @@ spec:
     name: system-dag-driver
     outputs:
      parameters:
-      - name: execution-id
+      - name: parent-dag-task-id-path
         valueFrom:
-          path: /tmp/outputs/execution-id
+          path: /tmp/outputs/task-id
       - name: iteration-count
         valueFrom:
           default: "0"
@@ -312,6 +368,14 @@ spec:
         valueFrom:
           default: "true"
           path: /tmp/outputs/condition
+    volumes:
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
   - dag:
       tasks:
       - arguments:
@@ -326,8 +390,8 @@ spec:
         template: system-dag-driver
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{tasks.root-driver.outputs.parameters.execution-id}}'
+          - name: parent-dag-task-id
+            value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}'
           - name: condition
             value: ""
         depends: root-driver.Succeeded
diff --git a/test_data/compiled-workflows/pipeline_with_pod_metadata.yaml b/test_data/compiled-workflows/pipeline_with_pod_metadata.yaml
index 8605d61e14b..00cabc988cd 100644
--- a/test_data/compiled-workflows/pipeline_with_pod_metadata.yaml
+++ b/test_data/compiled-workflows/pipeline_with_pod_metadata.yaml
@@ -163,8 +163,8 @@ spec:
       - '{{workflow.name}}'
       - --run_display_name
       - ""
-      - --dag_execution_id
-      - '{{inputs.parameters.parent-dag-id}}'
+      - --parent_task_id
+      - '{{inputs.parameters.parent-dag-task-id}}'
       - --component
       - '{{inputs.parameters.component}}'
       - --task
@@ -191,6 +191,19 @@ spec:
       - ""
       command:
       - driver
+      env:
+      - name: KFP_POD_NAME
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.name
+      - name: KFP_POD_UID
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       image: ghcr.io/kubeflow/kfp-driver:latest
       name: ""
       resources:
@@ -200,13 +213,17 @@ spec:
         requests:
           cpu: 100m
           memory: 64Mi
+      volumeMounts:
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
     inputs:
       parameters:
       - name: component
       - name: task
       - name: container
       - name: task-name
-      - name: parent-dag-id
+      - name: parent-dag-task-id
       - default: "-1"
         name: iteration-index
       - default: ""
@@ -228,6 +245,14 @@ spec:
         valueFrom:
           default: "true"
           path: /tmp/outputs/condition
+    volumes:
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
   - dag:
       tasks:
       - arguments:
@@ -257,6 +282,10 @@ spec:
         valueFrom:
           fieldRef:
             fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       envFrom:
       - configMapRef:
           name: metadata-grpc-configmap
@@ -267,6 +296,9 @@ spec:
       volumeMounts:
       - mountPath: /kfp-launcher
         name: kfp-launcher
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
       - mountPath: /gcs
         name: gcs-scratch
       - mountPath: /s3
@@ -306,6 +338,13 @@ spec:
     volumes:
     - emptyDir: {}
       name: kfp-launcher
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
     - emptyDir: {}
       name: gcs-scratch
     - emptyDir: {}
@@ -365,6 +404,10 @@ spec:
         valueFrom:
           fieldRef:
             fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       envFrom:
       - configMapRef:
           name: metadata-grpc-configmap
@@ -375,6 +418,9 @@ spec:
       volumeMounts:
       - mountPath: /kfp-launcher
         name: kfp-launcher
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
       - mountPath: /gcs
         name: gcs-scratch
       - mountPath: /s3
@@ -425,6 +471,13 @@ spec:
     volumes:
     - emptyDir: {}
       name: kfp-launcher
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
     - emptyDir: {}
       name: gcs-scratch
     - emptyDir: {}
@@ -508,6 +561,10 @@ spec:
         valueFrom:
           fieldRef:
             fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       envFrom:
       - configMapRef:
           name: metadata-grpc-configmap
@@ -518,6 +575,9 @@ spec:
       volumeMounts:
       - mountPath: /kfp-launcher
         name: kfp-launcher
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
       - mountPath: /gcs
         name: gcs-scratch
       - mountPath: /s3
@@ -580,6 +640,13 @@ spec:
     volumes:
     - emptyDir: {}
       name: kfp-launcher
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
     - emptyDir: {}
       name: gcs-scratch
     - emptyDir: {}
@@ -633,6 +700,10 @@ spec:
         valueFrom:
           fieldRef:
             fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       envFrom:
       - configMapRef:
           name: metadata-grpc-configmap
@@ -643,6 +714,9 @@ spec:
       volumeMounts:
       - mountPath: /kfp-launcher
         name: kfp-launcher
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
       - mountPath: /gcs
         name: gcs-scratch
       - mountPath: /s3
@@ -689,6 +763,13 @@ spec:
     volumes:
     - emptyDir: {}
       name: kfp-launcher
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
     - emptyDir: {}
       name: gcs-scratch
     - emptyDir: {}
@@ -748,6 +829,10 @@ spec:
         valueFrom:
           fieldRef:
             fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       envFrom:
       - configMapRef:
           name: metadata-grpc-configmap
@@ -758,6 +843,9 @@ spec:
       volumeMounts:
       - mountPath: /kfp-launcher
         name: kfp-launcher
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
       - mountPath: /gcs
         name: gcs-scratch
       - mountPath: /s3
@@ -807,6 +895,13 @@ spec:
     volumes:
     - emptyDir: {}
       name: kfp-launcher
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
     - emptyDir: {}
       name: gcs-scratch
     - emptyDir: {}
@@ -831,8 +926,8 @@ spec:
             value: '{{workflow.parameters.implementations-3e1061118c8caaf7fb2580deca4c26c68777ea648895af667b433198f8d95a35}}'
           - name: task-name
             value: validate-no-pod-metadata
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
           - name: kubernetes-config
             value: '{{workflow.parameters.kubernetes-comp-validate-no-pod-metadata}}'
         name: validate-no-pod-metadata-driver
@@ -857,8 +952,8 @@ spec:
             value: '{{workflow.parameters.implementations-36b4f5715a9e847c587a42d8970df700b9d7977c0097b0b0a12b7be3bc7cb4f8}}'
           - name: task-name
             value: validate-pod-metadata-task-a
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
           - name: kubernetes-config
             value: '{{workflow.parameters.kubernetes-comp-validate-pod-metadata-task-a}}'
         name: validate-pod-metadata-task-a-driver
@@ -895,8 +990,8 @@ spec:
             value: '{{workflow.parameters.implementations-e07ac8a27d331c96ac2d5559c43f483a79d4e8956b1941d0e306aee450ad83f1}}'
           - name: task-name
             value: validate-pod-metadata-task-b
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
           - name: kubernetes-config
             value: '{{workflow.parameters.kubernetes-comp-validate-pod-metadata-task-b}}'
         name: validate-pod-metadata-task-b-driver
@@ -949,8 +1044,8 @@ spec:
             value: '{{workflow.parameters.implementations-08b02a2c7b00c133f7d053207ab1e549fe9374102122ca84dcd6f5a15683fe01}}'
           - name: task-name
             value: validate-pod-metadata-task-c
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
           - name: kubernetes-config
             value: '{{workflow.parameters.kubernetes-comp-validate-pod-metadata-task-c}}'
         name: validate-pod-metadata-task-c-driver
@@ -983,8 +1078,8 @@ spec:
             value: '{{workflow.parameters.implementations-d56a2097f634e26b0381658dc361347f393e19552ad2c43e9b6b0b6606678502}}'
           - name: task-name
             value: validate-pod-metadata-task-d
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
           - name: kubernetes-config
             value: '{{workflow.parameters.kubernetes-comp-validate-pod-metadata-task-d}}'
         name: validate-pod-metadata-task-d-driver
@@ -1013,7 +1108,7 @@ spec:
         template: metadata-0-3-system-container-executor
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
     metadata: {}
     name: root
     outputs: {}
@@ -1029,8 +1124,8 @@ spec:
       - '{{workflow.name}}'
       - --run_display_name
       - ""
-      - --dag_execution_id
-      - '{{inputs.parameters.parent-dag-id}}'
+      - --parent_task_id
+      - '{{inputs.parameters.parent-dag-task-id}}'
       - --component
       - '{{inputs.parameters.component}}'
       - --task
@@ -1041,8 +1136,8 @@ spec:
       - '{{inputs.parameters.runtime-config}}'
       - --iteration_index
       - '{{inputs.parameters.iteration-index}}'
-      - --execution_id_path
-      - '{{outputs.parameters.execution-id.path}}'
+      - --parent_task_id_path
+      - '{{outputs.parameters.parent-dag-task-id-path.path}}'
       - --iteration_count_path
       - '{{outputs.parameters.iteration-count.path}}'
       - --condition_path
@@ -1055,6 +1150,19 @@ spec:
       - ""
       command:
       - driver
+      env:
+      - name: KFP_POD_NAME
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.name
+      - name: KFP_POD_UID
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       image: ghcr.io/kubeflow/kfp-driver:latest
       name: ""
       resources:
@@ -1064,6 +1172,10 @@ spec:
         requests:
           cpu: 100m
           memory: 64Mi
+      volumeMounts:
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
     inputs:
       parameters:
       - name: component
@@ -1073,8 +1185,8 @@ spec:
         name: task
       - default: ""
         name: task-name
-      - default: "0"
-        name: parent-dag-id
+      - default: ""
+        name: parent-dag-task-id
       - default: "-1"
         name: iteration-index
       - default: DAG
@@ -1083,9 +1195,9 @@ spec:
     name: system-dag-driver
     outputs:
       parameters:
-      - name: execution-id
+      - name: parent-dag-task-id-path
         valueFrom:
-          path: /tmp/outputs/execution-id
+          path: /tmp/outputs/task-id
       - name: iteration-count
         valueFrom:
           default: "0"
@@ -1094,6 +1206,14 @@ spec:
         valueFrom:
           default: "true"
           path: /tmp/outputs/condition
+    volumes:
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
   - dag:
       tasks:
       - arguments:
@@ -1108,8 +1228,8 @@ spec:
         template: system-dag-driver
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{tasks.root-driver.outputs.parameters.execution-id}}'
+          - name: parent-dag-task-id
+            value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}'
           - name: condition
             value: ""
         depends: root-driver.Succeeded
diff --git a/test_data/compiled-workflows/pipeline_with_retry.yaml b/test_data/compiled-workflows/pipeline_with_retry.yaml
index d8fe0a9855f..e613c703f84 100644
--- a/test_data/compiled-workflows/pipeline_with_retry.yaml
+++ b/test_data/compiled-workflows/pipeline_with_retry.yaml
@@ -41,8 +41,8 @@ spec:
       - '{{workflow.name}}'
       - --run_display_name
       - ""
-      - --dag_execution_id
-      - '{{inputs.parameters.parent-dag-id}}'
+      - --parent_task_id
+      - '{{inputs.parameters.parent-dag-task-id}}'
       - --component
       - '{{inputs.parameters.component}}'
       - --task
@@ -69,6 +69,19 @@ spec:
       - ""
       command:
       - driver
+      env:
+      - name: KFP_POD_NAME
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.name
+      - name: KFP_POD_UID
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       image: ghcr.io/kubeflow/kfp-driver:latest
       name: ""
       resources:
@@ -78,13 +91,17 @@ spec:
         requests:
           cpu: 100m
           memory: 64Mi
+      volumeMounts:
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
     inputs:
       parameters:
       - name: component
       - name: task
       - name: container
       - name: task-name
-      - name: parent-dag-id
+      - name: parent-dag-task-id
       - default: "-1"
         name: iteration-index
       - default: ""
@@ -106,6 +123,14 @@ spec:
         valueFrom:
           default: "true"
           path: /tmp/outputs/condition
+    volumes:
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
   - dag:
       tasks:
       - arguments:
@@ -151,6 +176,10 @@ spec:
         valueFrom:
           fieldRef:
             fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       envFrom:
       - configMapRef:
           name: metadata-grpc-configmap
@@ -161,6 +190,9 @@ spec:
       volumeMounts:
       - mountPath: /kfp-launcher
         name: kfp-launcher
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
       - mountPath: /gcs
         name: gcs-scratch
       - mountPath: /s3
@@ -210,6 +242,13 @@ spec:
     volumes:
     - emptyDir: {}
       name: kfp-launcher
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
     - emptyDir: {}
       name: gcs-scratch
     - emptyDir: {}
@@ -234,8 +273,8 @@ spec:
             value: '{{workflow.parameters.implementations-2815fa01c723249ff7034b9449ad45f7da5d129882c5d188191c2e30f2847773}}'
           - name: task-name
             value: add
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         name: add-driver
         template: system-container-driver
       - arguments:
@@ -258,7 +297,7 @@ spec:
         template: retry-system-container-executor
     inputs:
       parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
     metadata: {}
     name: root
     outputs: {}
@@ -274,8 +313,8 @@ spec:
       - '{{workflow.name}}'
       - --run_display_name
       - ""
-      - --dag_execution_id
-      - '{{inputs.parameters.parent-dag-id}}'
+      - --parent_task_id
+      - '{{inputs.parameters.parent-dag-task-id}}'
       - --component
       - '{{inputs.parameters.component}}'
       - --task
@@ -286,8 +325,8 @@ spec:
       - '{{inputs.parameters.runtime-config}}'
       - --iteration_index
       - '{{inputs.parameters.iteration-index}}'
-      - --execution_id_path
-      - '{{outputs.parameters.execution-id.path}}'
+      - --parent_task_id_path
+      - '{{outputs.parameters.parent-dag-task-id-path.path}}'
       - --iteration_count_path
       - '{{outputs.parameters.iteration-count.path}}'
       - --condition_path
@@ -300,6 +339,19 @@ spec:
       - ""
       command:
       - driver
+      env:
+      - name: KFP_POD_NAME
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.name
+      - name: KFP_POD_UID
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       image: ghcr.io/kubeflow/kfp-driver:latest
       name: ""
       resources:
@@ -309,6 +361,10 @@ spec:
         requests:
           cpu: 100m
           memory: 64Mi
+      volumeMounts:
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
     inputs:
       parameters:
       - name: component
@@ -318,8 +374,8 @@ spec:
         name: task
       - default: ""
         name: task-name
-      - default: "0"
-        name: parent-dag-id
+      - default: ""
+        name: parent-dag-task-id
       - default: "-1"
         name: iteration-index
       - default: DAG
@@ -328,9 +384,9 @@ spec:
     name: system-dag-driver
     outputs:
       parameters:
-      - name: execution-id
+      - name: parent-dag-task-id-path
         valueFrom:
-          path: /tmp/outputs/execution-id
+          path: /tmp/outputs/task-id
       - name: iteration-count
         valueFrom:
           default: "0"
@@ -339,6 +395,14 @@ spec:
         valueFrom:
           default: "true"
           path: /tmp/outputs/condition
+    volumes:
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
   - dag:
       tasks:
       - arguments:
@@ -353,8 +417,8 @@ spec:
         template: system-dag-driver
       - arguments:
           parameters:
-          - name: parent-dag-id
-            value: '{{tasks.root-driver.outputs.parameters.execution-id}}'
+          - name: parent-dag-task-id
+            value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}'
           - name: condition
             value: ""
         depends: root-driver.Succeeded
diff --git a/test_data/compiled-workflows/pipeline_with_reused_component.yaml b/test_data/compiled-workflows/pipeline_with_reused_component.yaml
index 1c9d0e640a2..f2d010acef8 100644
--- a/test_data/compiled-workflows/pipeline_with_reused_component.yaml
+++ b/test_data/compiled-workflows/pipeline_with_reused_component.yaml
@@ -41,8 +41,8 @@ spec:
       - '{{workflow.name}}'
       - --run_display_name
       - ""
-      - --dag_execution_id
-      - '{{inputs.parameters.parent-dag-id}}'
+      - --parent_task_id
+      - '{{inputs.parameters.parent-dag-task-id}}'
       - --component
       - '{{inputs.parameters.component}}'
       - --task
@@ -69,6 +69,19 @@ spec:
       - ""
       command:
       - driver
+      env:
+      - name: KFP_POD_NAME
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.name
+      - name: KFP_POD_UID
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       image: ghcr.io/kubeflow/kfp-driver:latest
       name: ""
       resources:
@@ -78,13 +91,17 @@ spec:
         requests:
           cpu: 100m
           memory: 64Mi
+      volumeMounts:
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
     inputs:
       parameters:
       - name: component
       - name: task
       - name: container
       - name: task-name
-      - name: parent-dag-id
+      - name: parent-dag-task-id
       - default: "-1"
         name: iteration-index
       - default: ""
@@ -106,6 +123,14 @@ spec:
         valueFrom:
           default: "true"
           path: /tmp/outputs/condition
+    volumes:
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
   - dag:
       tasks:
       - arguments:
@@ -135,6 +160,10 @@ spec:
         valueFrom:
           fieldRef:
             fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       envFrom:
       - configMapRef:
           name: metadata-grpc-configmap
@@ -145,6 +174,9 @@ spec:
       volumeMounts:
       - mountPath: /kfp-launcher
         name: kfp-launcher
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
       - mountPath: /gcs
name: gcs-scratch - mountPath: /s3 @@ -184,6 +216,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -208,8 +247,8 @@ spec: value: '{{workflow.parameters.implementations-a06e49d55601ed1e78432721afac56c959e05f0346dc650f749ccab33c0e425e}}' - name: task-name value: add-numbers - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: add-numbers-driver template: system-container-driver - arguments: @@ -232,8 +271,8 @@ spec: value: '{{workflow.parameters.implementations-a06e49d55601ed1e78432721afac56c959e05f0346dc650f749ccab33c0e425e}}' - name: task-name value: add-numbers-2 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: add-numbers.Succeeded name: add-numbers-2-driver template: system-container-driver @@ -257,8 +296,8 @@ spec: value: '{{workflow.parameters.implementations-a06e49d55601ed1e78432721afac56c959e05f0346dc650f749ccab33c0e425e}}' - name: task-name value: add-numbers-3 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: add-numbers-2.Succeeded name: add-numbers-3-driver template: system-container-driver @@ -274,7 +313,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -290,8 +329,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -302,8 +341,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -316,6 +355,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -325,6 +377,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -334,8 +390,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -344,9 +400,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -355,6 +411,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: 
pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -369,8 +433,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/pipeline_with_secret_as_env.yaml b/test_data/compiled-workflows/pipeline_with_secret_as_env.yaml index 97ff856c299..8bc235a49a8 100644 --- a/test_data/compiled-workflows/pipeline_with_secret_as_env.yaml +++ b/test_data/compiled-workflows/pipeline_with_secret_as_env.yaml @@ -60,8 +60,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -88,6 +88,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -97,13 +110,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -125,6 +142,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -154,6 +179,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -164,6 +193,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -203,6 +235,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -227,8 +266,8 @@ spec: value: '{{workflow.parameters.implementations-f5f5569b7b865a5b2950a75dfc385075059e8dc453c48f01c473f3b01f0a4027}}' - name: task-name value: comp - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: kubernetes-config value: '{{workflow.parameters.kubernetes-comp-comp}}' depends: generate-secret-name.Succeeded @@ -254,8 +293,8 @@ spec: value: '{{workflow.parameters.implementations-a6f0a699a84a6a4255ba12971522f2e31ce71807004b6bd0c8b832f750351283}}' - name: task-name value: generate-secret-name - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: generate-secret-name-driver template: system-container-driver - arguments: @@ -270,7 +309,7 @@ spec: template: 
system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -286,8 +325,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -298,8 +337,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -312,6 +351,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -321,6 +373,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -330,8 +386,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -340,9 +396,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -351,6 +407,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -365,8 +429,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/pipeline_with_secret_as_volume.yaml b/test_data/compiled-workflows/pipeline_with_secret_as_volume.yaml index 60930b69d2d..8589ef23bf0 100644 --- a/test_data/compiled-workflows/pipeline_with_secret_as_volume.yaml +++ b/test_data/compiled-workflows/pipeline_with_secret_as_volume.yaml @@ -48,8 +48,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -76,6 +76,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -85,13 +98,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - 
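
Every driver and executor pod in these fixtures also gains an audience-scoped service-account token, projected into a dedicated volume and mounted read-only at /var/run/secrets/kfp. The recurring volume/mount pair, as added by the hunks above:

volumes:
  - name: kfp-launcher-token
    projected:
      sources:
        - serviceAccountToken:
            audience: pipelines.kubeflow.org   # scoped audience, not the default API-server one
            expirationSeconds: 7200            # the kubelet refreshes the file before expiry
            path: token                        # file lands at /var/run/secrets/kfp/token
volumeMounts:
  - mountPath: /var/run/secrets/kfp
    name: kfp-launcher-token
    readOnly: true

Because the token only validates for services that explicitly accept the pipelines.kubeflow.org audience, it is not accepted by the Kubernetes API server itself (unless that audience is configured there), which narrows the blast radius of a leaked token.
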
name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -113,6 +130,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -142,6 +167,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -152,6 +181,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -191,6 +223,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -215,8 +254,8 @@ spec: value: '{{workflow.parameters.implementations-552dd41fa2418575aa5ae289a30234ba1e45c44408832759059a616eeb2327ba}}' - name: task-name value: comp - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: kubernetes-config value: '{{workflow.parameters.kubernetes-comp-comp}}' name: comp-driver @@ -233,7 +272,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -249,8 +288,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -261,8 +300,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -275,6 +314,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -284,6 +336,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -293,8 +349,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -303,9 +359,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -314,6 +370,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - 
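
The same pods now learn their own coordinates through the Kubernetes downward API: driver containers gain all three variables below, while executor containers, which already exported the pod name and UID, gain only NAMESPACE. The full trio as it appears in the new hunks:

env:
  - name: KFP_POD_NAME
    valueFrom:
      fieldRef:
        fieldPath: metadata.name        # resolved by the kubelet at container start
  - name: KFP_POD_UID
    valueFrom:
      fieldRef:
        fieldPath: metadata.uid
  - name: NAMESPACE
    valueFrom:
      fieldRef:
        fieldPath: metadata.namespace   # lets the binary learn its namespace without an API call
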
arguments: @@ -328,8 +392,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/pipeline_with_string_machine_fields_pipeline_input.yaml b/test_data/compiled-workflows/pipeline_with_string_machine_fields_pipeline_input.yaml index cd416d5126c..3d8b0a4fed7 100644 --- a/test_data/compiled-workflows/pipeline_with_string_machine_fields_pipeline_input.yaml +++ b/test_data/compiled-workflows/pipeline_with_string_machine_fields_pipeline_input.yaml @@ -41,8 +41,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -69,6 +69,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -78,13 +91,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -106,6 +123,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -135,6 +160,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -145,6 +174,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -184,6 +216,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -208,8 +247,8 @@ spec: value: '{{workflow.parameters.implementations-ac7285064d7d99b0f19ca3e34669996b38e3e05eda2a6b78e0b3463b1a2a6186}}' - name: task-name value: sum-numbers - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: sum-numbers-driver template: system-container-driver - arguments: @@ -224,7 +263,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -240,8 +279,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -252,8 +291,8 @@ spec: - 
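
The DAG driver's output contract is renamed to match: instead of emitting an execution-id read from /tmp/outputs/execution-id, the template emits parent-dag-task-id-path read from /tmp/outputs/task-id, and the new --parent_task_id_path flag tells the driver binary where to write that file. The wiring after the change, excerpted from the system-dag-driver hunks:

args:
  - --parent_task_id_path
  - '{{outputs.parameters.parent-dag-task-id-path.path}}'   # Argo substitutes the literal path
outputs:
  parameters:
    - name: parent-dag-task-id-path    # was: execution-id
      valueFrom:
        path: /tmp/outputs/task-id     # was: /tmp/outputs/execution-id
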
'{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -266,6 +305,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -275,6 +327,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -284,8 +340,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -294,9 +350,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -305,6 +361,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -319,8 +383,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/pipeline_with_string_machine_fields_task_output.yaml b/test_data/compiled-workflows/pipeline_with_string_machine_fields_task_output.yaml index 74184b94bff..da11eab51c0 100644 --- a/test_data/compiled-workflows/pipeline_with_string_machine_fields_task_output.yaml +++ b/test_data/compiled-workflows/pipeline_with_string_machine_fields_task_output.yaml @@ -72,7 +72,7 @@ spec: kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef sum_numbers(a: int, b: int) -\u003e int:\n return a + b\n\n"],"image":"python:3.11","resources":{"accelerator":{"resourceCount":"{{$.inputs.parameters[''pipelinechannel--accelerator-limit-Output'']}}","resourceType":"{{$.inputs.parameters[''pipelinechannel--accelerator-type-Output'']}}"},"resourceCpuLimit":"{{$.inputs.parameters[''pipelinechannel--cpu-limit-Output'']}}","resourceMemoryLimit":"{{$.inputs.parameters[''pipelinechannel--memory-limit-Output'']}}"}}' - name: components-root - value: 
'{"dag":{"tasks":{"accelerator-limit":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-accelerator-limit"},"taskInfo":{"name":"accelerator-limit"}},"accelerator-type":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-accelerator-type"},"taskInfo":{"name":"accelerator-type"}},"cpu-limit":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-cpu-limit"},"taskInfo":{"name":"cpu-limit"}},"memory-limit":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-memory-limit"},"taskInfo":{"name":"memory-limit"}},"sum-numbers":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-sum-numbers"},"dependentTasks":["accelerator-limit","accelerator-type","memory-limit","cpu-limit"],"inputs":{"parameters":{"a":{"runtimeValue":{"constant":1}},"accelerator_count":{"runtimeValue":{"constant":"{{$.inputs.parameters[''pipelinechannel--accelerator-limit-Output'']}}"}},"accelerator_type":{"runtimeValue":{"constant":"{{$.inputs.parameters[''pipelinechannel--accelerator-type-Output'']}}"}},"b":{"runtimeValue":{"constant":2}},"cpu_limit":{"runtimeValue":{"constant":"{{$.inputs.parameters[''pipelinechannel--cpu-limit-Output'']}}"}},"memory_limit":{"runtimeValue":{"constant":"{{$.inputs.parameters[''pipelinechannel--memory-limit-Output'']}}"}},"pipelinechannel--accelerator-limit-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"accelerator-limit"}},"pipelinechannel--accelerator-type-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"accelerator-type"}},"pipelinechannel--cpu-limit-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"cpu-limit"}},"pipelinechannel--memory-limit-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"memory-limit"}}}},"taskInfo":{"name":"sum-numbers"}}}}}' + value: 
'{"dag":{"tasks":{"accelerator-limit":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-accelerator-limit"},"taskInfo":{"name":"accelerator-limit"}},"accelerator-type":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-accelerator-type"},"taskInfo":{"name":"accelerator-type"}},"cpu-limit":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-cpu-limit"},"taskInfo":{"name":"cpu-limit"}},"memory-limit":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-memory-limit"},"taskInfo":{"name":"memory-limit"}},"sum-numbers":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-sum-numbers"},"dependentTasks":["accelerator-limit","accelerator-type","cpu-limit","memory-limit"],"inputs":{"parameters":{"a":{"runtimeValue":{"constant":1}},"accelerator_count":{"runtimeValue":{"constant":"{{$.inputs.parameters[''pipelinechannel--accelerator-limit-Output'']}}"}},"accelerator_type":{"runtimeValue":{"constant":"{{$.inputs.parameters[''pipelinechannel--accelerator-type-Output'']}}"}},"b":{"runtimeValue":{"constant":2}},"cpu_limit":{"runtimeValue":{"constant":"{{$.inputs.parameters[''pipelinechannel--cpu-limit-Output'']}}"}},"memory_limit":{"runtimeValue":{"constant":"{{$.inputs.parameters[''pipelinechannel--memory-limit-Output'']}}"}},"pipelinechannel--accelerator-limit-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"accelerator-limit"}},"pipelinechannel--accelerator-type-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"accelerator-type"}},"pipelinechannel--cpu-limit-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"cpu-limit"}},"pipelinechannel--memory-limit-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"memory-limit"}}}},"taskInfo":{"name":"sum-numbers"}}}}}' entrypoint: entrypoint podMetadata: annotations: @@ -93,8 +93,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -121,6 +121,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -130,13 +143,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -158,6 +175,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -187,6 +212,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -197,6 +226,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - 
mountPath: /s3 @@ -236,6 +268,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -260,8 +299,8 @@ spec: value: '{{workflow.parameters.implementations-12837483d1ac7aafcef7d75cb8553ad734a85089a7511ff009cbace2b8f18312}}' - name: task-name value: accelerator-limit - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: accelerator-limit-driver template: system-container-driver - arguments: @@ -284,8 +323,8 @@ spec: value: '{{workflow.parameters.implementations-f9a25b0b0eb8f4240e030d4007d8ba04dd922f7ac266816c8751d15ad42c5236}}' - name: task-name value: accelerator-type - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: accelerator-type-driver template: system-container-driver - arguments: @@ -308,8 +347,8 @@ spec: value: '{{workflow.parameters.implementations-3e42152114e50a3fa222dc0a74cb1a91431ac56c168a1fc6a6c12749d1c24939}}' - name: task-name value: cpu-limit - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: cpu-limit-driver template: system-container-driver - arguments: @@ -332,8 +371,8 @@ spec: value: '{{workflow.parameters.implementations-85f92f571187f62f60b0032a62bbab38678ebb5c899ba184021054c62fdb8a1c}}' - name: task-name value: memory-limit - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: memory-limit-driver template: system-container-driver - arguments: @@ -351,15 +390,15 @@ spec: - name: component value: '{{workflow.parameters.components-49f9a898b718a077f30b7fd8c02d39767cff91ff0bbda4379daf866a91dbdb1b}}' - name: task - value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-sum-numbers"},"dependentTasks":["accelerator-limit","accelerator-type","memory-limit","cpu-limit"],"inputs":{"parameters":{"a":{"runtimeValue":{"constant":1}},"accelerator_count":{"runtimeValue":{"constant":"{{$.inputs.parameters[''pipelinechannel--accelerator-limit-Output'']}}"}},"accelerator_type":{"runtimeValue":{"constant":"{{$.inputs.parameters[''pipelinechannel--accelerator-type-Output'']}}"}},"b":{"runtimeValue":{"constant":2}},"cpu_limit":{"runtimeValue":{"constant":"{{$.inputs.parameters[''pipelinechannel--cpu-limit-Output'']}}"}},"memory_limit":{"runtimeValue":{"constant":"{{$.inputs.parameters[''pipelinechannel--memory-limit-Output'']}}"}},"pipelinechannel--accelerator-limit-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"accelerator-limit"}},"pipelinechannel--accelerator-type-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"accelerator-type"}},"pipelinechannel--cpu-limit-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"cpu-limit"}},"pipelinechannel--memory-limit-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"memory-limit"}}}},"taskInfo":{"name":"sum-numbers"}}' + value: 
'{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-sum-numbers"},"dependentTasks":["accelerator-limit","accelerator-type","cpu-limit","memory-limit"],"inputs":{"parameters":{"a":{"runtimeValue":{"constant":1}},"accelerator_count":{"runtimeValue":{"constant":"{{$.inputs.parameters[''pipelinechannel--accelerator-limit-Output'']}}"}},"accelerator_type":{"runtimeValue":{"constant":"{{$.inputs.parameters[''pipelinechannel--accelerator-type-Output'']}}"}},"b":{"runtimeValue":{"constant":2}},"cpu_limit":{"runtimeValue":{"constant":"{{$.inputs.parameters[''pipelinechannel--cpu-limit-Output'']}}"}},"memory_limit":{"runtimeValue":{"constant":"{{$.inputs.parameters[''pipelinechannel--memory-limit-Output'']}}"}},"pipelinechannel--accelerator-limit-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"accelerator-limit"}},"pipelinechannel--accelerator-type-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"accelerator-type"}},"pipelinechannel--cpu-limit-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"cpu-limit"}},"pipelinechannel--memory-limit-Output":{"taskOutputParameter":{"outputParameterKey":"Output","producerTask":"memory-limit"}}}},"taskInfo":{"name":"sum-numbers"}}' - name: container value: '{{workflow.parameters.implementations-49f9a898b718a077f30b7fd8c02d39767cff91ff0bbda4379daf866a91dbdb1b}}' - name: task-name value: sum-numbers - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - depends: accelerator-limit.Succeeded && accelerator-type.Succeeded && memory-limit.Succeeded - && cpu-limit.Succeeded + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + depends: accelerator-limit.Succeeded && accelerator-type.Succeeded && cpu-limit.Succeeded + && memory-limit.Succeeded name: sum-numbers-driver template: system-container-driver - arguments: @@ -374,7 +413,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -390,8 +429,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -402,8 +441,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -416,6 +455,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -425,6 +477,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -434,8 +490,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -444,9 +500,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: 
execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -455,6 +511,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -469,8 +533,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/pipeline_with_submit_request.yaml b/test_data/compiled-workflows/pipeline_with_submit_request.yaml index d2215b3cfb4..da9c2dc9169 100644 --- a/test_data/compiled-workflows/pipeline_with_submit_request.yaml +++ b/test_data/compiled-workflows/pipeline_with_submit_request.yaml @@ -52,8 +52,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -80,6 +80,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -89,13 +102,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -117,6 +134,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -146,6 +171,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -156,6 +185,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -195,6 +227,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -219,8 +258,8 @@ spec: value: '{{workflow.parameters.implementations-b6bf28aee0444effd80c7574f7b4daa48e7dcc6786c440bbb62d4f5552b3981e}}' - name: task-name value: submit-request - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: submit-request-driver template: system-container-driver - arguments: @@ -243,8 +282,8 @@ spec: value: 
'{{workflow.parameters.implementations-b6bf28aee0444effd80c7574f7b4daa48e7dcc6786c440bbb62d4f5552b3981e}}' - name: task-name value: submit-request-2 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: submit-request-2-driver template: system-container-driver - arguments: @@ -259,7 +298,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -275,8 +314,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -287,8 +326,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -301,6 +340,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -310,6 +362,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -319,8 +375,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -329,9 +385,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -340,6 +396,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -354,8 +418,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/pipeline_with_task_final_status.yaml b/test_data/compiled-workflows/pipeline_with_task_final_status.yaml index 9d7cde234ba..c3ca4c5771a 100644 --- a/test_data/compiled-workflows/pipeline_with_task_final_status.yaml +++ b/test_data/compiled-workflows/pipeline_with_task_final_status.yaml @@ -75,8 +75,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -103,6 +103,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + 
fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -112,13 +125,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -140,6 +157,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -169,6 +194,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -179,6 +208,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -218,6 +250,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -243,8 +282,8 @@ spec: value: '{{workflow.parameters.implementations-f7d7acf968eee71dd35f342b2defffecd0b666d98ba3f84067666f01761ce844}}' - name: task-name value: fail-op - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: fail-op-driver template: system-container-driver - arguments: @@ -267,8 +306,8 @@ spec: value: '{{workflow.parameters.implementations-092cff624d6cd885e152ba4746a8171a94bbfad01e209a7d23d4499a65520f8d}}' - name: task-name value: print-op - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-op-driver template: system-container-driver - arguments: @@ -283,7 +322,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-exit-handler-1 outputs: {} @@ -299,8 +338,8 @@ spec: value: '{{workflow.parameters.implementations-8c1d358f7eb020b42803091b690668b1d8107ef638cd1bb1ff074c364f18639f}}' - name: task-name value: exit-op - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: exit-op-driver template: system-container-driver - arguments: @@ -315,7 +354,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: exit-hook-root-exit-op outputs: {} @@ -331,8 +370,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -343,8 +382,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - 
'{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -357,6 +396,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -366,6 +418,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -375,8 +431,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -385,9 +441,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -396,14 +452,22 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: parameters: - name: component value: '{{workflow.parameters.components-comp-exit-handler-1}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"componentRef":{"name":"comp-exit-handler-1"},"inputs":{"parameters":{"pipelinechannel--message":{"componentInputParameter":"message"}}},"taskInfo":{"name":"my-pipeline"}}' - name: task-name @@ -412,8 +476,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.exit-handler-1-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.exit-handler-1-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.exit-handler-1-driver.outputs.parameters.condition}}' depends: exit-handler-1-driver.Succeeded @@ -421,14 +485,14 @@ spec: exit: arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' template: exit-hook-root-exit-op name: exit-handler-1 template: comp-exit-handler-1 inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -446,8 +510,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/pipeline_with_task_using_ignore_upstream_failure.yaml b/test_data/compiled-workflows/pipeline_with_task_using_ignore_upstream_failure.yaml index f9a6c0307f5..ef66d1266e5 100644 --- a/test_data/compiled-workflows/pipeline_with_task_using_ignore_upstream_failure.yaml +++ b/test_data/compiled-workflows/pipeline_with_task_using_ignore_upstream_failure.yaml @@ -55,8 +55,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - 
'{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -83,6 +83,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -92,13 +105,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -120,6 +137,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -149,6 +174,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -159,6 +188,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -198,6 +230,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -222,8 +261,8 @@ spec: value: '{{workflow.parameters.implementations-54651fb1cfb6bbf8377ab247bf3477ceefbfe476ff3687f7116d051f7f526f93}}' - name: task-name value: print-op - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: print-op-driver template: system-container-driver - arguments: @@ -238,7 +277,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: exit-hook-root-print-op outputs: {} @@ -254,8 +293,8 @@ spec: value: '{{workflow.parameters.implementations-efcc763bebb8f65a468ef9c3dd844fdcc92d0e17c005c84793244e2775cc61fe}}' - name: task-name value: fail-op - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: fail-op-driver template: system-container-driver - arguments: @@ -270,14 +309,14 @@ spec: exit: arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' template: exit-hook-root-print-op name: fail-op template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -293,8 +332,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -305,8 +344,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - 
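
A subtle knock-on shows up in every system-dag-driver template: the default for the parent parameter flips from "0" to "". The old --dag_execution_id was a numeric ID, for which "0" is a natural zero value; the new parent task ID appears to be an opaque string, so the empty string now denotes "no parent yet" for the root driver:

inputs:
  parameters:
    - default: ""             # was: "0" when this carried a numeric execution ID
      name: parent-dag-task-id
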
--iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -319,6 +358,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -328,6 +380,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -337,8 +393,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -347,9 +403,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -358,6 +414,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -372,8 +436,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/pipeline_with_utils.yaml b/test_data/compiled-workflows/pipeline_with_utils.yaml index 85a0ae926f3..f5c8d707240 100644 --- a/test_data/compiled-workflows/pipeline_with_utils.yaml +++ b/test_data/compiled-workflows/pipeline_with_utils.yaml @@ -45,8 +45,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -73,6 +73,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -82,13 +95,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -110,6 +127,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -139,6 +164,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: 
metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -149,6 +178,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -188,6 +220,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -212,8 +251,8 @@ spec: value: '{{workflow.parameters.implementations-f3dee78c44ac18676c4c12c69e34f993a82f0c5af39391208a5dcd57c54c24fc}}' - name: task-name value: echo - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: echo-driver template: system-container-driver - arguments: @@ -228,7 +267,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -244,8 +283,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -256,8 +295,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -270,6 +309,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -279,6 +331,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -288,8 +344,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -298,9 +354,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -309,6 +365,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -323,8 +387,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/pipeline_with_various_io_types.yaml b/test_data/compiled-workflows/pipeline_with_various_io_types.yaml index d8f59df2c4a..f82d7d5a4fb 100644 --- 
a/test_data/compiled-workflows/pipeline_with_various_io_types.yaml +++ b/test_data/compiled-workflows/pipeline_with_various_io_types.yaml @@ -36,8 +36,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -64,6 +64,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -73,13 +86,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -101,6 +118,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -130,6 +155,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -140,6 +169,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -179,6 +211,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -203,8 +242,8 @@ spec: value: '{{workflow.parameters.implementations-47f8556d9d5209135f0475cec07f23896de64558ee7eb4299cc3ebffcad12306}}' - name: task-name value: downstream - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: upstream.Succeeded name: downstream-driver template: system-container-driver @@ -228,8 +267,8 @@ spec: value: '{{workflow.parameters.implementations-3dd78b16c97f19cec3a05c91c163d2ef6f3cdee55ffd014e9fc805751d9f2ace}}' - name: task-name value: upstream - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: upstream-driver template: system-container-driver - arguments: @@ -244,7 +283,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -260,8 +299,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -272,8 +311,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - 
'{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -286,6 +325,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -295,6 +347,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -304,8 +360,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -314,9 +370,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -325,6 +381,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -339,8 +403,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/pipeline_with_volume.yaml b/test_data/compiled-workflows/pipeline_with_volume.yaml index 360d128735d..cc1fa42d0f3 100644 --- a/test_data/compiled-workflows/pipeline_with_volume.yaml +++ b/test_data/compiled-workflows/pipeline_with_volume.yaml @@ -89,8 +89,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -117,6 +117,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -126,13 +139,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -154,6 +171,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -183,6 +208,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -193,6 +222,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - 
mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -232,6 +264,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -256,8 +295,8 @@ spec: value: '{{workflow.parameters.implementations-d7fb91ca6b78edad062c1f44d01d414280f5740b5260436244e8b7638538eede}}' - name: task-name value: consumer - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: kubernetes-config value: '{{workflow.parameters.kubernetes-comp-consumer}}' depends: createpvc.Succeeded && producer.Succeeded @@ -283,8 +322,8 @@ spec: value: '{{workflow.parameters.implementations-98f254581598234b59377784d6cbf209de79e0bcda8013fe4c4397b5d3a26767}}' - name: task-name value: createpvc - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: createpvc template: system-container-driver - arguments: @@ -297,8 +336,8 @@ spec: value: '{{workflow.parameters.implementations-ecfc655dce17b0d317707d37fc226fb7de858cc93d45916945122484a13ef725}}' - name: task-name value: deletepvc - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: consumer.Succeeded && createpvc.Succeeded name: deletepvc template: system-container-driver @@ -312,8 +351,8 @@ spec: value: '{{workflow.parameters.implementations-35323c00ea64cb236ca34272f55bdb938f396af10887a294134939d9819557b9}}' - name: task-name value: producer - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: kubernetes-config value: '{{workflow.parameters.kubernetes-comp-producer}}' depends: createpvc.Succeeded @@ -331,7 +370,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -347,8 +386,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -359,8 +398,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -373,6 +412,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -382,6 +434,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -391,8 +447,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - 
name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -401,9 +457,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -412,6 +468,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -426,8 +490,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/pipeline_with_volume_no_cache.yaml b/test_data/compiled-workflows/pipeline_with_volume_no_cache.yaml index fc2d74a1863..6de568ac556 100644 --- a/test_data/compiled-workflows/pipeline_with_volume_no_cache.yaml +++ b/test_data/compiled-workflows/pipeline_with_volume_no_cache.yaml @@ -89,8 +89,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -117,6 +117,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -126,13 +139,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -154,6 +171,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -183,6 +208,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -193,6 +222,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -232,6 +264,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -256,8 +295,8 @@ spec: value: '{{workflow.parameters.implementations-d7fb91ca6b78edad062c1f44d01d414280f5740b5260436244e8b7638538eede}}' - name: task-name value: consumer - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: 
kubernetes-config value: '{{workflow.parameters.kubernetes-comp-consumer}}' depends: createpvc.Succeeded && producer.Succeeded @@ -283,8 +322,8 @@ spec: value: '{{workflow.parameters.implementations-98f254581598234b59377784d6cbf209de79e0bcda8013fe4c4397b5d3a26767}}' - name: task-name value: createpvc - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: createpvc template: system-container-driver - arguments: @@ -297,8 +336,8 @@ spec: value: '{{workflow.parameters.implementations-ecfc655dce17b0d317707d37fc226fb7de858cc93d45916945122484a13ef725}}' - name: task-name value: deletepvc - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: consumer.Succeeded && createpvc.Succeeded name: deletepvc template: system-container-driver @@ -312,8 +351,8 @@ spec: value: '{{workflow.parameters.implementations-35323c00ea64cb236ca34272f55bdb938f396af10887a294134939d9819557b9}}' - name: task-name value: producer - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: kubernetes-config value: '{{workflow.parameters.kubernetes-comp-producer}}' depends: createpvc.Succeeded @@ -331,7 +370,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -347,8 +386,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -359,8 +398,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -373,6 +412,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -382,6 +434,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -391,8 +447,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -401,9 +457,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -412,6 +468,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -426,8 +490,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: 
parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/pipeline_with_workspace.yaml b/test_data/compiled-workflows/pipeline_with_workspace.yaml index 77d1a90319e..cd55907fae9 100644 --- a/test_data/compiled-workflows/pipeline_with_workspace.yaml +++ b/test_data/compiled-workflows/pipeline_with_workspace.yaml @@ -65,8 +65,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -93,6 +93,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -102,13 +115,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -130,6 +147,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -159,6 +184,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -169,6 +198,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -208,6 +240,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -232,8 +271,8 @@ spec: value: '{{workflow.parameters.implementations-099322c2301dec986b734828a313953d589b6759b84e2204d3be55050a87abf3}}' - name: task-name value: read-from-workspace - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: write-to-workspace.Succeeded name: read-from-workspace-driver template: system-container-driver @@ -257,8 +296,8 @@ spec: value: '{{workflow.parameters.implementations-d94cb6155685654cc8749b6be9ebf3fb71fe13a85b91fd244acdd09265f42081}}' - name: task-name value: write-to-workspace - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: write-to-workspace-driver template: system-container-driver - arguments: @@ -273,7 +312,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -289,8 +328,8 @@ spec: - '{{workflow.name}}' - 
--run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -301,8 +340,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -315,6 +354,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -324,6 +376,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -333,8 +389,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -343,9 +399,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -354,6 +410,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -368,8 +432,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/placeholder_with_if_placeholder_none_input_value.yaml b/test_data/compiled-workflows/placeholder_with_if_placeholder_none_input_value.yaml index 4bfc4183a8f..c6d404e7b84 100644 --- a/test_data/compiled-workflows/placeholder_with_if_placeholder_none_input_value.yaml +++ b/test_data/compiled-workflows/placeholder_with_if_placeholder_none_input_value.yaml @@ -35,8 +35,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -63,6 +63,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -72,13 +85,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -100,6 
+117,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -129,6 +154,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -139,6 +168,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -178,6 +210,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -202,8 +241,8 @@ spec: value: '{{workflow.parameters.implementations-685cc8721e4d47ea4a7b1a8012621f5d72de4fa023d17bce3261c2c91f95ee2d}}' - name: task-name value: component-with-optional-inputs - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: component-with-optional-inputs-driver template: system-container-driver - arguments: @@ -218,7 +257,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -234,8 +273,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -246,8 +285,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -260,6 +299,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -269,6 +321,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -278,8 +334,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -288,9 +344,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -299,6 +355,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -313,8 +377,8 @@ spec: template: system-dag-driver - arguments: parameters: - 
- name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/preprocess.yaml b/test_data/compiled-workflows/preprocess.yaml index e6be2d4037b..7fd100421cc 100644 --- a/test_data/compiled-workflows/preprocess.yaml +++ b/test_data/compiled-workflows/preprocess.yaml @@ -63,8 +63,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -91,6 +91,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -100,13 +113,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -128,6 +145,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -157,6 +182,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -167,6 +196,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -206,6 +238,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -230,8 +269,8 @@ spec: value: '{{workflow.parameters.implementations-ea150e85c8a1d73e8fa5cc90cd210a9ae6d20007a7071b293f971fafdecbe6f4}}' - name: task-name value: preprocess - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: preprocess-driver template: system-container-driver - arguments: @@ -246,7 +285,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -262,8 +301,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -274,8 +313,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - 
--iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -288,6 +327,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -297,6 +349,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -306,8 +362,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -316,9 +372,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -327,6 +383,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -341,8 +405,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/producer_consumer_param_pipeline.yaml b/test_data/compiled-workflows/producer_consumer_param_pipeline.yaml index b4afe1c10f1..bc6c8796c88 100644 --- a/test_data/compiled-workflows/producer_consumer_param_pipeline.yaml +++ b/test_data/compiled-workflows/producer_consumer_param_pipeline.yaml @@ -39,8 +39,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -67,6 +67,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -76,13 +89,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -104,6 +121,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -133,6 +158,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -143,6 +172,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: 
/var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -182,6 +214,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -206,8 +245,8 @@ spec: value: '{{workflow.parameters.implementations-2de1cef765955c7d3216c1e98967e2fec098b8f68d6ee6836a324944ade35ee2}}' - name: task-name value: consumer - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: producer.Succeeded name: consumer-driver template: system-container-driver @@ -231,8 +270,8 @@ spec: value: '{{workflow.parameters.implementations-3111b7872fff0cb1b5ac026c55d212223c3295efb99dee1efb0ab793c8883ca6}}' - name: task-name value: producer - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: producer-driver template: system-container-driver - arguments: @@ -247,7 +286,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -263,8 +302,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -275,8 +314,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -289,6 +328,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -298,6 +350,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -307,8 +363,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -317,9 +373,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -328,6 +384,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -342,8 +406,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: 
condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/pvc_mount.yaml b/test_data/compiled-workflows/pvc_mount.yaml index 6f27cae6091..b241284b408 100644 --- a/test_data/compiled-workflows/pvc_mount.yaml +++ b/test_data/compiled-workflows/pvc_mount.yaml @@ -59,8 +59,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -87,6 +87,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -96,13 +109,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -124,6 +141,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -153,6 +178,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -163,6 +192,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -202,6 +234,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -226,8 +265,8 @@ spec: value: '{{workflow.parameters.implementations-ce3e01089ee23e8efa89e03cf85a7ac0e7d23e8c90eb9480c0ba530606857c31}}' - name: task-name value: consumer - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: kubernetes-config value: '{{workflow.parameters.kubernetes-comp-consumer}}' depends: producer.Succeeded @@ -253,8 +292,8 @@ spec: value: '{{workflow.parameters.implementations-e666b1854737f2a7cd7fb811fc0fca9cd637782450adc694b816d1980f0b2bd2}}' - name: task-name value: producer - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: kubernetes-config value: '{{workflow.parameters.kubernetes-comp-producer}}' name: producer-driver @@ -271,7 +310,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -287,8 +326,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -299,8 +338,8 @@ spec: - 
'{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -313,6 +352,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -322,6 +374,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -331,8 +387,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -341,9 +397,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -352,6 +408,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -366,8 +430,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/pythonic_artifact_with_single_return.yaml b/test_data/compiled-workflows/pythonic_artifact_with_single_return.yaml index 4434697a31a..03d37245bb5 100644 --- a/test_data/compiled-workflows/pythonic_artifact_with_single_return.yaml +++ b/test_data/compiled-workflows/pythonic_artifact_with_single_return.yaml @@ -44,26 +44,20 @@ spec: args: - --executor_type - importer - - --task_spec - - '{{inputs.parameters.task}}' - - --component_spec - - '{{inputs.parameters.component}}' + - --task_name + - '{{inputs.parameters.task-name}}' - --importer_spec - '{{inputs.parameters.importer}}' - --pipeline_name - make-language-model-pipeline - --run_id - '{{workflow.uid}}' - - --parent_dag_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --pod_name - $(KFP_POD_NAME) - --pod_uid - $(KFP_POD_UID) - - --mlmd_server_address - - $(METADATA_GRPC_SERVICE_HOST) - - --mlmd_server_port - - $(METADATA_GRPC_SERVICE_PORT) command: - launcher-v2 env: @@ -75,6 +69,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -88,15 +86,26 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - - name: task - - name: component + - name: task-name - name: importer - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: system-importer outputs: {} + volumes: + - name: 
kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - container: args: - --type @@ -109,8 +118,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -137,6 +146,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -146,13 +168,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -174,6 +200,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -203,6 +237,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -213,6 +251,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -252,6 +293,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -268,14 +316,12 @@ spec: tasks: - arguments: parameters: - - name: task - value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-importer"},"inputs":{"parameters":{"uri":{"runtimeValue":{"constant":"gs://ml-pipeline-playground/shakespeare1.txt"}}}},"taskInfo":{"name":"importer"}}' - - name: component - value: '{{workflow.parameters.components-comp-importer}}' + - name: task-name + value: importer - name: importer value: '{{workflow.parameters.implementations-comp-importer}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: importer template: system-importer - arguments: @@ -288,8 +334,8 @@ spec: value: '{{workflow.parameters.implementations-4351a2bfc0420795b356dad17db637309898c44eae821502eab5e2c753a1cddb}}' - name: task-name value: make-language-model - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: importer.Succeeded name: make-language-model-driver template: system-container-driver @@ -305,7 +351,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -321,8 +367,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - 
--parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -333,8 +379,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -347,6 +393,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -356,6 +415,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -365,8 +428,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -375,9 +438,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -386,6 +449,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -400,8 +471,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/pythonic_artifacts_test_pipeline.yaml b/test_data/compiled-workflows/pythonic_artifacts_test_pipeline.yaml index b61a359e629..768bb228a39 100644 --- a/test_data/compiled-workflows/pythonic_artifacts_test_pipeline.yaml +++ b/test_data/compiled-workflows/pythonic_artifacts_test_pipeline.yaml @@ -60,8 +60,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -88,6 +88,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -97,13 +110,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -125,6 +142,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: 
+ audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -154,6 +179,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -164,6 +193,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -203,6 +235,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -227,8 +266,8 @@ spec: value: '{{workflow.parameters.implementations-c192368c8461c3cdeb1a1c1ce2c7130e644b68af5a8e9b05afbaa3aac70143cc}}' - name: task-name value: gen-data - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: gen-data-driver template: system-container-driver - arguments: @@ -251,8 +290,8 @@ spec: value: '{{workflow.parameters.implementations-d051e68c28d78cde70e680b1fdf49912c1eed8b335b05d1d4518af362cc097f5}}' - name: task-name value: train-model - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: gen-data.Succeeded name: train-model-driver template: system-container-driver @@ -268,7 +307,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -284,8 +323,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -296,8 +335,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -310,6 +349,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -319,6 +371,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -328,8 +384,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -338,9 +394,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -349,6 +405,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + 
sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -363,8 +427,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/pythonic_artifacts_with_list_of_artifacts.yaml b/test_data/compiled-workflows/pythonic_artifacts_with_list_of_artifacts.yaml index f75f2c462c1..08d3c0811a1 100644 --- a/test_data/compiled-workflows/pythonic_artifacts_with_list_of_artifacts.yaml +++ b/test_data/compiled-workflows/pythonic_artifacts_with_list_of_artifacts.yaml @@ -61,8 +61,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -89,6 +89,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -98,13 +111,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -126,6 +143,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -155,6 +180,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -165,6 +194,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -204,6 +236,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -228,8 +267,10 @@ spec: value: '{{workflow.parameters.implementations-00785a58f9cf0df219dc62ecbba2450770628d2c6d43751435fa5c1b9a479637}}' - name: task-name value: make-dataset - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' name: make-dataset-driver template: system-container-driver - arguments: @@ -239,12 +280,15 @@ spec: - default: "false" name: cached-decision value: '{{tasks.make-dataset-driver.outputs.parameters.cached-decision}}' + - name: iteration-index + value: '{{inputs.parameters.iteration-index}}' depends: make-dataset-driver.Succeeded name: make-dataset template: system-container-executor 
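# --- annotation (not part of the generated diff) ---------------------------
# The hunks in this section of test_data/compiled-workflows apply the same
# mechanical changes to every compiled workflow:
#   * the driver/executor parameter parent-dag-id is renamed to
#     parent-dag-task-id, the DAG-driver flag --dag_execution_id becomes
#     --parent_task_id, and the driver's output parameter moves from
#     /tmp/outputs/execution-id to /tmp/outputs/task-id;
#   * each driver and launcher container gains downward-API env vars
#     (KFP_POD_NAME, KFP_POD_UID, NAMESPACE);
#   * each template gains a projected service-account token volume
#     (kfp-launcher-token) mounted read-only at /var/run/secrets/kfp;
#   * the system-importer invocation of launcher-v2 drops the
#     --task_spec/--component_spec JSON blobs and the
#     --mlmd_server_address/--mlmd_server_port flags in favor of
#     --task_name and --parent_task_id.
# The self-contained Pod manifest below is a minimal sketch of the env-var
# and token patterns in isolation, assuming they can be reproduced outside
# an Argo Workflow. The pod name is illustrative and the driver image is a
# stand-in; every other field is copied verbatim from the hunks. The
# audience value suggests the token is presented to the KFP API server, but
# that behavior is not shown in this diff.
apiVersion: v1
kind: Pod
metadata:
  name: kfp-driver-token-example   # illustrative name, not from the diff
spec:
  containers:
    - name: driver
      image: ghcr.io/kubeflow/kfp-driver:latest
      command: ["driver"]
      env:
        # Downward API: expose pod identity to the binary, as in the hunks.
        - name: KFP_POD_NAME
          valueFrom:
            fieldRef:
              fieldPath: metadata.name
        - name: KFP_POD_UID
          valueFrom:
            fieldRef:
              fieldPath: metadata.uid
        - name: NAMESPACE
          valueFrom:
            fieldRef:
              fieldPath: metadata.namespace
      volumeMounts:
        # Audience-scoped token, readable at /var/run/secrets/kfp/token.
        - mountPath: /var/run/secrets/kfp
          name: kfp-launcher-token
          readOnly: true
  volumes:
    - name: kfp-launcher-token
      projected:
        sources:
          - serviceAccountToken:
              audience: pipelines.kubeflow.org
              expirationSeconds: 7200
              path: token
# --- end annotation ---------------------------------------------------------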
inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id + - name: iteration-index metadata: {} name: comp-for-loop-1 outputs: {} @@ -260,8 +304,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -272,8 +316,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -286,6 +330,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -295,6 +352,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -304,8 +365,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -314,9 +375,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -325,62 +386,42 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: parameters: - name: component value: '{{workflow.parameters.components-comp-for-loop-1}}' - - name: iteration-index - value: '{{inputs.parameters.iteration-index}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' - - name: task - value: '{"componentRef":{"name":"comp-for-loop-1"},"inputs":{"parameters":{"pipelinechannel--texts":{"componentInputParameter":"texts"}}},"parameterIterator":{"itemInput":"pipelinechannel--texts-loop-item","items":{"inputParameter":"pipelinechannel--texts"}},"taskInfo":{"name":"for-loop-1"}}' - name: iteration-item-driver - template: system-dag-driver - - arguments: - parameters: - - name: parent-dag-id - value: '{{tasks.iteration-item-driver.outputs.parameters.execution-id}}' - - name: condition - value: '{{tasks.iteration-item-driver.outputs.parameters.condition}}' - depends: iteration-item-driver.Succeeded - name: iteration-item - template: comp-for-loop-1 - inputs: - parameters: - - name: parent-dag-id - - name: iteration-index - metadata: {} - name: comp-for-loop-1-iteration - outputs: {} - - dag: - tasks: - - arguments: - parameters: - - name: component - value: '{{workflow.parameters.components-comp-for-loop-1}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: 
'{"componentRef":{"name":"comp-for-loop-1"},"inputs":{"parameters":{"pipelinechannel--texts":{"componentInputParameter":"texts"}}},"parameterIterator":{"itemInput":"pipelinechannel--texts-loop-item","items":{"inputParameter":"pipelinechannel--texts"}},"taskInfo":{"name":"for-loop-1"}}' + - name: task-name + value: for-loop-1 name: iteration-driver template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.iteration-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.iteration-driver.outputs.parameters.parent-dag-task-id-path}}' - name: iteration-index value: '{{item}}' depends: iteration-driver.Succeeded name: iteration-iterations - template: comp-for-loop-1-iteration + template: comp-for-loop-1 withSequence: count: '{{tasks.iteration-driver.outputs.parameters.iteration-count}}' inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-for-loop-1-for-loop-1-iterator outputs: {} @@ -388,8 +429,8 @@ spec: tasks: - arguments: parameters: - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: for-loop-1 template: comp-for-loop-1-for-loop-1-iterator - arguments: @@ -402,8 +443,8 @@ spec: value: '{{workflow.parameters.implementations-c10c6ce95f2c2e5236bcfc27f308556c66fb19eed301c24c3b93be8a1998b8d3}}' - name: task-name value: join-datasets - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: for-loop-1.Succeeded name: join-datasets-driver template: system-container-driver @@ -419,7 +460,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -437,8 +478,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/pythonic_artifacts_with_multiple_returns.yaml b/test_data/compiled-workflows/pythonic_artifacts_with_multiple_returns.yaml index 323901b6e43..eb8132e787b 100644 --- a/test_data/compiled-workflows/pythonic_artifacts_with_multiple_returns.yaml +++ b/test_data/compiled-workflows/pythonic_artifacts_with_multiple_returns.yaml @@ -66,8 +66,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -94,6 +94,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -103,13 +116,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -131,6 +148,14 @@ 
spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -160,6 +185,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -170,6 +199,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -209,6 +241,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -233,8 +272,8 @@ spec: value: '{{workflow.parameters.implementations-a1cab77dea9e6edee97745b998070da20716dcf0eee94102ec9bfac02152a267}}' - name: task-name value: dataset-splitter - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: dataset-splitter-driver template: system-container-driver - arguments: @@ -249,7 +288,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: comp-splitter-pipeline outputs: {} @@ -265,8 +304,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -277,8 +316,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -291,6 +330,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -300,6 +352,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -309,8 +365,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -319,9 +375,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -330,6 +386,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -342,8 +406,8 @@ spec: value: 
'{{workflow.parameters.implementations-8985a8fb7d17ffd865e926bd80a135f8c5e0ff2fcc20f64013705bc6175ad440}}' - name: task-name value: make-dataset - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: make-dataset-driver template: system-container-driver - arguments: @@ -360,8 +424,8 @@ spec: parameters: - name: component value: '{{workflow.parameters.components-comp-splitter-pipeline}}' - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' - name: task value: '{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-splitter-pipeline"},"dependentTasks":["make-dataset"],"inputs":{"artifacts":{"in_dataset":{"taskOutputArtifact":{"outputArtifactKey":"Output","producerTask":"make-dataset"}}}},"taskInfo":{"name":"splitter-pipeline"}}' - name: task-name @@ -371,8 +435,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.splitter-pipeline-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.splitter-pipeline-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: '{{tasks.splitter-pipeline-driver.outputs.parameters.condition}}' depends: splitter-pipeline-driver.Succeeded @@ -380,7 +444,7 @@ spec: template: comp-splitter-pipeline inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -398,8 +462,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/ray_integration_compiled.yaml b/test_data/compiled-workflows/ray_integration_compiled.yaml index dd29bfbbfc3..2751e156d95 100644 --- a/test_data/compiled-workflows/ray_integration_compiled.yaml +++ b/test_data/compiled-workflows/ray_integration_compiled.yaml @@ -55,8 +55,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -83,6 +83,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -92,13 +105,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -120,6 +137,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -149,6 +174,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + 
fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -159,6 +188,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -198,6 +230,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -222,8 +261,8 @@ spec: value: '{{workflow.parameters.implementations-e324569752b9b88e93853985e0f3840219baf7ccad191a166bd7e23ebcd3b5e4}}' - name: task-name value: ray-fn - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: ray-fn-driver template: system-container-driver - arguments: @@ -238,7 +277,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -254,8 +293,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -266,8 +305,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -280,6 +319,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -289,6 +341,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -298,8 +354,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -308,9 +364,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -319,6 +375,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -333,8 +397,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/ray_job_integration_compiled.yaml b/test_data/compiled-workflows/ray_job_integration_compiled.yaml index b47c832e143..7cb7cd285bf 100644 
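Across every file in this diff the driver CLI contract changes the same way: --dag_execution_id becomes --parent_task_id, --execution_id_path becomes --parent_task_id_path, and the DAG driver's output parameter is renamed from execution-id (read from /tmp/outputs/execution-id) to parent-dag-task-id-path (read from /tmp/outputs/task-id). A minimal sketch of the resulting Argo template with only the renamed fields kept (all other arguments, env, and volumes elided for brevity):

# Sketch of the renamed driver flags and output parameter after this change;
# unrelated fields are elided — see the full hunks above for the complete template.
name: system-dag-driver
container:
  image: ghcr.io/kubeflow/kfp-driver:latest
  command:
    - driver
  args:
    - --parent_task_id
    - '{{inputs.parameters.parent-dag-task-id}}'
    - --parent_task_id_path
    - '{{outputs.parameters.parent-dag-task-id-path.path}}'
outputs:
  parameters:
    - name: parent-dag-task-id-path
      valueFrom:
        path: /tmp/outputs/task-id

Downstream DAG tasks then wire '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' into their parent-dag-task-id parameter where they previously passed the driver's execution id.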
--- a/test_data/compiled-workflows/ray_job_integration_compiled.yaml +++ b/test_data/compiled-workflows/ray_job_integration_compiled.yaml @@ -168,8 +168,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -196,6 +196,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -205,13 +218,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -233,6 +250,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -262,6 +287,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -272,6 +301,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -311,6 +343,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -335,8 +374,8 @@ spec: value: '{{workflow.parameters.implementations-95a95a4327246937049bccce9f1ac13270fef5003171f5e78e231b5aab3aee6d}}' - name: task-name value: ray-fn - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: ray-fn-driver template: system-container-driver - arguments: @@ -351,7 +390,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -367,8 +406,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -379,8 +418,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -393,6 +432,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace 
image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -402,6 +454,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -411,8 +467,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -421,9 +477,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -432,6 +488,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -446,8 +510,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/sequential_v1.yaml b/test_data/compiled-workflows/sequential_v1.yaml index f04ffd41fa3..3d15f92b756 100644 --- a/test_data/compiled-workflows/sequential_v1.yaml +++ b/test_data/compiled-workflows/sequential_v1.yaml @@ -32,8 +32,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -60,6 +60,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -69,13 +82,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -97,6 +114,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -126,6 +151,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -136,6 +165,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -175,6 +207,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -199,8 +238,8 @@ spec: value: 
'{{workflow.parameters.implementations-eaac86761cafbf4600ca777dc7022d3e2c469dd34f22ba18fcd4b75a384b615d}}' - name: task-name value: echo - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: echo-driver template: system-container-driver - arguments: @@ -223,8 +262,8 @@ spec: value: '{{workflow.parameters.implementations-eaac86761cafbf4600ca777dc7022d3e2c469dd34f22ba18fcd4b75a384b615d}}' - name: task-name value: echo-2 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: echo-2-driver template: system-container-driver - arguments: @@ -239,7 +278,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -255,8 +294,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -267,8 +306,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -281,6 +320,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -290,6 +342,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -299,8 +355,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -309,9 +365,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -320,6 +376,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -334,8 +398,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/sequential_v2.yaml b/test_data/compiled-workflows/sequential_v2.yaml index edcbf7c9fe4..017fd99140c 100644 --- a/test_data/compiled-workflows/sequential_v2.yaml +++ b/test_data/compiled-workflows/sequential_v2.yaml @@ -36,8 +36,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - 
--parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -64,6 +64,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -73,13 +86,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -101,6 +118,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -130,6 +155,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -140,6 +169,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -179,6 +211,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -203,8 +242,8 @@ spec: value: '{{workflow.parameters.implementations-b5daddb26b6451a9de844c9aa33f01e8c04af37d7ac3dcd260612d1979b6a08b}}' - name: task-name value: download - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: download-driver template: system-container-driver - arguments: @@ -227,8 +266,8 @@ spec: value: '{{workflow.parameters.implementations-db4626120aacf0a2094a96782ad9a93dcdc01de0abd3898f2ec96001765c7d47}}' - name: task-name value: echo - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: download.Succeeded name: echo-driver template: system-container-driver @@ -244,7 +283,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -260,8 +299,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -272,8 +311,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -286,6 +325,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + 
valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -295,6 +347,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -304,8 +360,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -314,9 +370,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -325,6 +381,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -339,8 +403,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/take_nap_compiled.yaml b/test_data/compiled-workflows/take_nap_compiled.yaml index 427b7aa5e96..6e772b7b2a9 100644 --- a/test_data/compiled-workflows/take_nap_compiled.yaml +++ b/test_data/compiled-workflows/take_nap_compiled.yaml @@ -57,8 +57,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -85,6 +85,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -94,13 +107,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -122,6 +139,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -151,6 +176,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -161,6 +190,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -200,6 +232,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + 
expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -224,8 +263,8 @@ spec: value: '{{workflow.parameters.implementations-548fb7a269f053e3060d5a7b3eee8d609d93eccb623ab5a9b4913b8e62a3d71c}}' - name: task-name value: take-nap - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: take-nap-driver template: system-container-driver - arguments: @@ -248,8 +287,8 @@ spec: value: '{{workflow.parameters.implementations-578a199df014da25449b2880c39c343872ba3c2c790e062fd52bbfd871d14775}}' - name: task-name value: wake-up - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: take-nap.Succeeded name: wake-up-driver template: system-container-driver @@ -265,7 +304,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -281,8 +320,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -293,8 +332,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -307,6 +346,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -316,6 +368,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -325,8 +381,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -335,9 +391,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -346,6 +402,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -360,8 +424,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/take_nap_pipeline_root_compiled.yaml b/test_data/compiled-workflows/take_nap_pipeline_root_compiled.yaml index 427b7aa5e96..6e772b7b2a9 100644 --- a/test_data/compiled-workflows/take_nap_pipeline_root_compiled.yaml 
+++ b/test_data/compiled-workflows/take_nap_pipeline_root_compiled.yaml @@ -57,8 +57,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -85,6 +85,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -94,13 +107,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -122,6 +139,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -151,6 +176,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -161,6 +190,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -200,6 +232,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -224,8 +263,8 @@ spec: value: '{{workflow.parameters.implementations-548fb7a269f053e3060d5a7b3eee8d609d93eccb623ab5a9b4913b8e62a3d71c}}' - name: task-name value: take-nap - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: take-nap-driver template: system-container-driver - arguments: @@ -248,8 +287,8 @@ spec: value: '{{workflow.parameters.implementations-578a199df014da25449b2880c39c343872ba3c2c790e062fd52bbfd871d14775}}' - name: task-name value: wake-up - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: take-nap.Succeeded name: wake-up-driver template: system-container-driver @@ -265,7 +304,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -281,8 +320,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -293,8 +332,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - 
'{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -307,6 +346,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -316,6 +368,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -325,8 +381,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -335,9 +391,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -346,6 +402,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -360,8 +424,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git a/test_data/compiled-workflows/two_step_pipeline.yaml b/test_data/compiled-workflows/two_step_pipeline.yaml index 600448ce03b..55b126c8462 100644 --- a/test_data/compiled-workflows/two_step_pipeline.yaml +++ b/test_data/compiled-workflows/two_step_pipeline.yaml @@ -37,8 +37,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -65,6 +65,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -74,13 +87,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -102,6 +119,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -131,6 +156,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -141,6 +170,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: 
gcs-scratch - mountPath: /s3 @@ -180,6 +212,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -204,8 +243,8 @@ spec: value: '{{workflow.parameters.implementations-27a1ed0385660429e07f843557e4216aa3a45bcf5cf5dc468af6aab6e63d1c5e}}' - name: task-name value: read-from-gcs - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: write-to-gcs.Succeeded name: read-from-gcs-driver template: system-container-driver @@ -229,8 +268,8 @@ spec: value: '{{workflow.parameters.implementations-b290238f9f93331c1d7d9f765948cdc6af4420f14f5ebd4ee26539c3055a1493}}' - name: task-name value: write-to-gcs - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: write-to-gcs-driver template: system-container-driver - arguments: @@ -245,7 +284,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -261,8 +300,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -273,8 +312,8 @@ spec: - '{{inputs.parameters.runtime-config}}' - --iteration_index - '{{inputs.parameters.iteration-index}}' - - --execution_id_path - - '{{outputs.parameters.execution-id.path}}' + - --parent_task_id_path + - '{{outputs.parameters.parent-dag-task-id-path.path}}' - --iteration_count_path - '{{outputs.parameters.iteration-count.path}}' - --condition_path @@ -287,6 +326,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -296,6 +348,10 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component @@ -305,8 +361,8 @@ spec: name: task - default: "" name: task-name - - default: "0" - name: parent-dag-id + - default: "" + name: parent-dag-task-id - default: "-1" name: iteration-index - default: DAG @@ -315,9 +371,9 @@ spec: name: system-dag-driver outputs: parameters: - - name: execution-id + - name: parent-dag-task-id-path valueFrom: - path: /tmp/outputs/execution-id + path: /tmp/outputs/task-id - name: iteration-count valueFrom: default: "0" @@ -326,6 +382,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -340,8 +404,8 @@ spec: template: system-dag-driver - arguments: parameters: - - name: parent-dag-id - value: '{{tasks.root-driver.outputs.parameters.execution-id}}' + - name: parent-dag-task-id + value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}' - name: condition value: "" depends: root-driver.Succeeded diff --git 
a/test_data/compiled-workflows/two_step_pipeline_containerized.yaml b/test_data/compiled-workflows/two_step_pipeline_containerized.yaml index 316094c70cc..518050c3291 100644 --- a/test_data/compiled-workflows/two_step_pipeline_containerized.yaml +++ b/test_data/compiled-workflows/two_step_pipeline_containerized.yaml @@ -38,8 +38,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -66,6 +66,19 @@ spec: - "" command: - driver + env: + - name: KFP_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: KFP_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace image: ghcr.io/kubeflow/kfp-driver:latest name: "" resources: @@ -75,13 +88,17 @@ spec: requests: cpu: 100m memory: 64Mi + volumeMounts: + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true inputs: parameters: - name: component - name: task - name: container - name: task-name - - name: parent-dag-id + - name: parent-dag-task-id - default: "-1" name: iteration-index - default: "" @@ -103,6 +120,14 @@ spec: valueFrom: default: "true" path: /tmp/outputs/condition + volumes: + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - dag: tasks: - arguments: @@ -132,6 +157,10 @@ spec: valueFrom: fieldRef: fieldPath: metadata.uid + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace envFrom: - configMapRef: name: metadata-grpc-configmap @@ -142,6 +171,9 @@ spec: volumeMounts: - mountPath: /kfp-launcher name: kfp-launcher + - mountPath: /var/run/secrets/kfp + name: kfp-launcher-token + readOnly: true - mountPath: /gcs name: gcs-scratch - mountPath: /s3 @@ -181,6 +213,13 @@ spec: volumes: - emptyDir: {} name: kfp-launcher + - name: kfp-launcher-token + projected: + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 7200 + path: token - emptyDir: {} name: gcs-scratch - emptyDir: {} @@ -205,8 +244,8 @@ spec: value: '{{workflow.parameters.implementations-475bac6023d1f786e45ff58ef0f9941e93c80c64a148e28d3506f4a41ec6e102}}' - name: task-name value: component1 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' name: component1-driver template: system-container-driver - arguments: @@ -229,8 +268,8 @@ spec: value: '{{workflow.parameters.implementations-a251b98b01e63019fbe86cccecda6133320b517f80c2594d7119857a86df7195}}' - name: task-name value: component2 - - name: parent-dag-id - value: '{{inputs.parameters.parent-dag-id}}' + - name: parent-dag-task-id + value: '{{inputs.parameters.parent-dag-task-id}}' depends: component1.Succeeded name: component2-driver template: system-container-driver @@ -246,7 +285,7 @@ spec: template: system-container-executor inputs: parameters: - - name: parent-dag-id + - name: parent-dag-task-id metadata: {} name: root outputs: {} @@ -262,8 +301,8 @@ spec: - '{{workflow.name}}' - --run_display_name - "" - - --dag_execution_id - - '{{inputs.parameters.parent-dag-id}}' + - --parent_task_id + - '{{inputs.parameters.parent-dag-task-id}}' - --component - '{{inputs.parameters.component}}' - --task @@ -274,8 +313,8 @@ spec: - 
       - '{{inputs.parameters.runtime-config}}'
       - --iteration_index
       - '{{inputs.parameters.iteration-index}}'
-      - --execution_id_path
-      - '{{outputs.parameters.execution-id.path}}'
+      - --parent_task_id_path
+      - '{{outputs.parameters.parent-dag-task-id-path.path}}'
       - --iteration_count_path
       - '{{outputs.parameters.iteration-count.path}}'
       - --condition_path
@@ -288,6 +327,19 @@ spec:
       - ""
       command:
       - driver
+      env:
+      - name: KFP_POD_NAME
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.name
+      - name: KFP_POD_UID
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       image: ghcr.io/kubeflow/kfp-driver:latest
       name: ""
       resources:
@@ -297,6 +349,10 @@ spec:
         requests:
           cpu: 100m
           memory: 64Mi
+      volumeMounts:
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
     inputs:
       parameters:
       - name: component
@@ -306,8 +362,8 @@ spec:
       - default: ""
         name: task
       - default: ""
         name: task-name
-      - default: "0"
-        name: parent-dag-id
+      - default: ""
+        name: parent-dag-task-id
       - default: "-1"
         name: iteration-index
       - default: DAG
@@ -316,9 +372,9 @@ spec:
     name: system-dag-driver
     outputs:
       parameters:
-      - name: execution-id
+      - name: parent-dag-task-id-path
         valueFrom:
-          path: /tmp/outputs/execution-id
+          path: /tmp/outputs/task-id
       - name: iteration-count
         valueFrom:
           default: "0"
@@ -327,6 +383,14 @@ spec:
         valueFrom:
           default: "true"
           path: /tmp/outputs/condition
+    volumes:
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
   - dag:
       tasks:
       - arguments:
@@ -341,8 +405,8 @@ spec:
         template: system-dag-driver
       - arguments:
          parameters:
-          - name: parent-dag-id
-            value: '{{tasks.root-driver.outputs.parameters.execution-id}}'
+          - name: parent-dag-task-id
+            value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}'
           - name: condition
             value: ""
         depends: root-driver.Succeeded
diff --git a/test_data/compiled-workflows/upload_download_compiled.yaml b/test_data/compiled-workflows/upload_download_compiled.yaml
index 230ca4cf796..1133c43c809 100644
--- a/test_data/compiled-workflows/upload_download_compiled.yaml
+++ b/test_data/compiled-workflows/upload_download_compiled.yaml
@@ -96,8 +96,8 @@ spec:
       - '{{workflow.name}}'
       - --run_display_name
       - ""
-      - --dag_execution_id
-      - '{{inputs.parameters.parent-dag-id}}'
+      - --parent_task_id
+      - '{{inputs.parameters.parent-dag-task-id}}'
       - --component
       - '{{inputs.parameters.component}}'
       - --task
@@ -124,6 +124,19 @@ spec:
       - ""
       command:
       - driver
+      env:
+      - name: KFP_POD_NAME
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.name
+      - name: KFP_POD_UID
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       image: ghcr.io/kubeflow/kfp-driver:latest
       name: ""
       resources:
@@ -133,13 +146,17 @@ spec:
         requests:
           cpu: 100m
           memory: 64Mi
+      volumeMounts:
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
     inputs:
       parameters:
       - name: component
       - name: task
       - name: container
       - name: task-name
-      - name: parent-dag-id
+      - name: parent-dag-task-id
       - default: "-1"
         name: iteration-index
       - default: ""
@@ -161,6 +178,14 @@ spec:
         valueFrom:
           default: "true"
           path: /tmp/outputs/condition
+    volumes:
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
   - dag:
       tasks:
       - arguments:
@@ -190,6 +215,10 @@ spec:
         valueFrom:
           fieldRef:
             fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       envFrom:
       - configMapRef:
           name: metadata-grpc-configmap
@@ -200,6 +229,9 @@ spec:
       volumeMounts:
       - mountPath: /kfp-launcher
         name: kfp-launcher
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
       - mountPath: /gcs
         name: gcs-scratch
       - mountPath: /s3
@@ -239,6 +271,13 @@ spec:
     volumes:
     - emptyDir: {}
       name: kfp-launcher
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
     - emptyDir: {}
       name: gcs-scratch
     - emptyDir: {}
@@ -263,8 +302,8 @@ spec:
             value: '{{workflow.parameters.implementations-c33fa3b99654c0449406568c8f79055a726bdfef6f12ef6bdd2fff0e1cc32bd0}}'
           - name: task-name
             value: receive-file
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         depends: send-file.Succeeded
         name: receive-file-driver
         template: system-container-driver
@@ -288,8 +327,8 @@ spec:
             value: '{{workflow.parameters.implementations-21623f5d1d15354c4b58c256e42c5e4fcabb7503930b4a6f4ace6876d023a660}}'
           - name: task-name
             value: send-file
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         name: send-file-driver
         template: system-container-driver
       - arguments:
@@ -312,8 +351,8 @@ spec:
             value: '{{workflow.parameters.implementations-c222529d08b231cbea2debe39f47b0c9f12c39e63429d94017fceeb53226b68c}}'
           - name: task-name
             value: test-uploaded-artifact
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         depends: receive-file.Succeeded
         name: test-uploaded-artifact-driver
         template: system-container-driver
@@ -329,7 +368,7 @@ spec:
         template: system-container-executor
     inputs:
      parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
     metadata: {}
     name: root
     outputs: {}
@@ -345,8 +384,8 @@ spec:
       - '{{workflow.name}}'
       - --run_display_name
       - ""
-      - --dag_execution_id
-      - '{{inputs.parameters.parent-dag-id}}'
+      - --parent_task_id
+      - '{{inputs.parameters.parent-dag-task-id}}'
       - --component
       - '{{inputs.parameters.component}}'
       - --task
@@ -357,8 +396,8 @@ spec:
       - '{{inputs.parameters.runtime-config}}'
       - --iteration_index
       - '{{inputs.parameters.iteration-index}}'
-      - --execution_id_path
-      - '{{outputs.parameters.execution-id.path}}'
+      - --parent_task_id_path
+      - '{{outputs.parameters.parent-dag-task-id-path.path}}'
       - --iteration_count_path
       - '{{outputs.parameters.iteration-count.path}}'
       - --condition_path
@@ -371,6 +410,19 @@ spec:
       - ""
       command:
       - driver
+      env:
+      - name: KFP_POD_NAME
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.name
+      - name: KFP_POD_UID
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       image: ghcr.io/kubeflow/kfp-driver:latest
       name: ""
       resources:
@@ -380,6 +432,10 @@ spec:
         requests:
           cpu: 100m
           memory: 64Mi
+      volumeMounts:
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
     inputs:
       parameters:
       - name: component
@@ -389,8 +445,8 @@ spec:
       - default: ""
         name: task
       - default: ""
         name: task-name
-      - default: "0"
-        name: parent-dag-id
+      - default: ""
+        name: parent-dag-task-id
       - default: "-1"
         name: iteration-index
       - default: DAG
@@ -399,9 +455,9 @@ spec:
     name: system-dag-driver
     outputs:
       parameters:
-      - name: execution-id
+      - name: parent-dag-task-id-path
         valueFrom:
-          path: /tmp/outputs/execution-id
+          path: /tmp/outputs/task-id
       - name: iteration-count
         valueFrom:
           default: "0"
@@ -410,6 +466,14 @@ spec:
         valueFrom:
           default: "true"
           path: /tmp/outputs/condition
+    volumes:
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
   - dag:
       tasks:
       - arguments:
@@ -424,8 +488,8 @@ spec:
         template: system-dag-driver
       - arguments:
          parameters:
-          - name: parent-dag-id
-            value: '{{tasks.root-driver.outputs.parameters.execution-id}}'
+          - name: parent-dag-task-id
+            value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}'
           - name: condition
             value: ""
         depends: root-driver.Succeeded
diff --git a/test_data/compiled-workflows/xgboost_sample_pipeline.yaml b/test_data/compiled-workflows/xgboost_sample_pipeline.yaml
index ac7773d3df4..27b95913039 100644
--- a/test_data/compiled-workflows/xgboost_sample_pipeline.yaml
+++ b/test_data/compiled-workflows/xgboost_sample_pipeline.yaml
@@ -300,8 +300,8 @@ spec:
       - '{{workflow.name}}'
       - --run_display_name
       - ""
-      - --dag_execution_id
-      - '{{inputs.parameters.parent-dag-id}}'
+      - --parent_task_id
+      - '{{inputs.parameters.parent-dag-task-id}}'
       - --component
       - '{{inputs.parameters.component}}'
       - --task
@@ -328,6 +328,19 @@ spec:
       - ""
       command:
       - driver
+      env:
+      - name: KFP_POD_NAME
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.name
+      - name: KFP_POD_UID
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       image: ghcr.io/kubeflow/kfp-driver:latest
       name: ""
       resources:
@@ -337,13 +350,17 @@ spec:
         requests:
           cpu: 100m
           memory: 64Mi
+      volumeMounts:
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
     inputs:
       parameters:
       - name: component
       - name: task
       - name: container
       - name: task-name
-      - name: parent-dag-id
+      - name: parent-dag-task-id
       - default: "-1"
         name: iteration-index
       - default: ""
@@ -365,6 +382,14 @@ spec:
         valueFrom:
           default: "true"
           path: /tmp/outputs/condition
+    volumes:
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
   - dag:
       tasks:
       - arguments:
@@ -394,6 +419,10 @@ spec:
         valueFrom:
           fieldRef:
             fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       envFrom:
       - configMapRef:
           name: metadata-grpc-configmap
@@ -404,6 +433,9 @@ spec:
       volumeMounts:
       - mountPath: /kfp-launcher
         name: kfp-launcher
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
       - mountPath: /gcs
         name: gcs-scratch
       - mountPath: /s3
@@ -443,6 +475,13 @@ spec:
     volumes:
     - emptyDir: {}
       name: kfp-launcher
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
     - emptyDir: {}
       name: gcs-scratch
     - emptyDir: {}
@@ -468,8 +507,8 @@ spec:
             value: '{{workflow.parameters.implementations-7ad329a2ac89be6e1debabcd3d4912ec1f9f56f8d4b9fe479da5dd1ed4fc45e2}}'
           - name: task-name
             value: chicago-taxi-trips-dataset
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         name: chicago-taxi-trips-dataset-driver
         template: system-container-driver
       - arguments:
@@ -492,8 +531,8 @@ spec:
             value: '{{workflow.parameters.implementations-4c133da6a0e29aa7910d0718119ec596bd6681678360ab60e38dae9339b3efb9}}'
           - name: task-name
             value: convert-csv-to-apache-parquet
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         depends: chicago-taxi-trips-dataset.Succeeded
         name: convert-csv-to-apache-parquet-driver
         template: system-container-driver
@@ -517,8 +556,8 @@ spec:
             value: '{{workflow.parameters.implementations-d1f109ba389501999e4d468354015c7d3a7947affcf7e4697c201c9c1bd6f88b}}'
           - name: task-name
             value: xgboost-predict
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         depends: chicago-taxi-trips-dataset.Succeeded && xgboost-train.Succeeded
         name: xgboost-predict-driver
         template: system-container-driver
@@ -542,8 +581,8 @@ spec:
             value: '{{workflow.parameters.implementations-c62f35abca448022418477855ae1839ecc171d48bb1830ab69e1f75c0649519a}}'
           - name: task-name
             value: xgboost-predict-2
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         depends: convert-csv-to-apache-parquet.Succeeded && xgboost-train-2.Succeeded
         name: xgboost-predict-2-driver
         template: system-container-driver
@@ -567,8 +606,8 @@ spec:
             value: '{{workflow.parameters.implementations-c62f35abca448022418477855ae1839ecc171d48bb1830ab69e1f75c0649519a}}'
           - name: task-name
             value: xgboost-predict-3
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         depends: convert-csv-to-apache-parquet.Succeeded && xgboost-train.Succeeded
         name: xgboost-predict-3-driver
         template: system-container-driver
@@ -592,8 +631,8 @@ spec:
             value: '{{workflow.parameters.implementations-d1f109ba389501999e4d468354015c7d3a7947affcf7e4697c201c9c1bd6f88b}}'
           - name: task-name
             value: xgboost-predict-4
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         depends: chicago-taxi-trips-dataset.Succeeded && xgboost-train-2.Succeeded
         name: xgboost-predict-4-driver
         template: system-container-driver
@@ -617,8 +656,8 @@ spec:
            value: '{{workflow.parameters.implementations-0543e77f07b487833b7b7adaedea608d72a11fd2a76cfe13a80ea1431ce92901}}'
           - name: task-name
             value: xgboost-train
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         depends: chicago-taxi-trips-dataset.Succeeded
         name: xgboost-train-driver
         template: system-container-driver
@@ -642,8 +681,8 @@ spec:
            value: '{{workflow.parameters.implementations-0c2f0a07f4057d4f8388562ccd9bbf6ea50c2603b7efb45cadb1ddc872a26cb3}}'
           - name: task-name
             value: xgboost-train-2
-          - name: parent-dag-id
-            value: '{{inputs.parameters.parent-dag-id}}'
+          - name: parent-dag-task-id
+            value: '{{inputs.parameters.parent-dag-task-id}}'
         depends: convert-csv-to-apache-parquet.Succeeded
         name: xgboost-train-2-driver
         template: system-container-driver
@@ -659,7 +698,7 @@ spec:
         template: system-container-executor
     inputs:
      parameters:
-      - name: parent-dag-id
+      - name: parent-dag-task-id
     metadata: {}
     name: root
     outputs: {}
@@ -675,8 +714,8 @@ spec:
       - '{{workflow.name}}'
       - --run_display_name
       - ""
-      - --dag_execution_id
-      - '{{inputs.parameters.parent-dag-id}}'
+      - --parent_task_id
+      - '{{inputs.parameters.parent-dag-task-id}}'
       - --component
       - '{{inputs.parameters.component}}'
       - --task
@@ -687,8 +726,8 @@ spec:
       - '{{inputs.parameters.runtime-config}}'
       - --iteration_index
       - '{{inputs.parameters.iteration-index}}'
-      - --execution_id_path
-      - '{{outputs.parameters.execution-id.path}}'
+      - --parent_task_id_path
+      - '{{outputs.parameters.parent-dag-task-id-path.path}}'
       - --iteration_count_path
       - '{{outputs.parameters.iteration-count.path}}'
       - --condition_path
@@ -701,6 +740,19 @@ spec:
       - ""
       command:
       - driver
+      env:
+      - name: KFP_POD_NAME
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.name
+      - name: KFP_POD_UID
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.uid
+      - name: NAMESPACE
+        valueFrom:
+          fieldRef:
+            fieldPath: metadata.namespace
       image: ghcr.io/kubeflow/kfp-driver:latest
       name: ""
       resources:
@@ -710,6 +762,10 @@ spec:
         requests:
           cpu: 100m
           memory: 64Mi
+      volumeMounts:
+      - mountPath: /var/run/secrets/kfp
+        name: kfp-launcher-token
+        readOnly: true
     inputs:
       parameters:
       - name: component
@@ -719,8 +775,8 @@ spec:
       - default: ""
         name: task
       - default: ""
         name: task-name
-      - default: "0"
-        name: parent-dag-id
+      - default: ""
+        name: parent-dag-task-id
       - default: "-1"
         name: iteration-index
       - default: DAG
@@ -729,9 +785,9 @@ spec:
     name: system-dag-driver
     outputs:
       parameters:
-      - name: execution-id
+      - name: parent-dag-task-id-path
         valueFrom:
-          path: /tmp/outputs/execution-id
+          path: /tmp/outputs/task-id
       - name: iteration-count
         valueFrom:
           default: "0"
@@ -740,6 +796,14 @@ spec:
         valueFrom:
           default: "true"
           path: /tmp/outputs/condition
+    volumes:
+    - name: kfp-launcher-token
+      projected:
+        sources:
+        - serviceAccountToken:
+            audience: pipelines.kubeflow.org
+            expirationSeconds: 7200
+            path: token
   - dag:
       tasks:
       - arguments:
@@ -754,8 +818,8 @@ spec:
         template: system-dag-driver
      - arguments:
          parameters:
-          - name: parent-dag-id
-            value: '{{tasks.root-driver.outputs.parameters.execution-id}}'
+          - name: parent-dag-task-id
+            value: '{{tasks.root-driver.outputs.parameters.parent-dag-task-id-path}}'
           - name: condition
             value: ""
         depends: root-driver.Succeeded
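
Note on the pattern the hunks above apply repeatedly: every driver and launcher container now receives its pod identity through the downward API (KFP_POD_NAME, KFP_POD_UID, NAMESPACE) plus a projected service account token scoped to the pipelines.kubeflow.org audience, rotated every 7200 seconds, and mounted read-only at /var/run/secrets/kfp/token. The Go sketch below illustrates, under stated assumptions, how a process inside such a pod could consume these values; it is a minimal illustration, not the actual KFP driver code, and the API-server URL and bearer-token header scheme are assumptions for demonstration.

// Illustrative sketch only: reading the projected token and downward-API
// metadata made available by the manifest changes above. The endpoint and
// auth scheme are hypothetical, not the real driver implementation.
package main

import (
	"fmt"
	"net/http"
	"os"
	"time"
)

func main() {
	// Mounted read-only via the kfp-launcher-token projected volume; the
	// kubelet refreshes the file before the 7200s expiry, so re-read it
	// per request rather than caching it for the life of the process.
	token, err := os.ReadFile("/var/run/secrets/kfp/token")
	if err != nil {
		panic(err)
	}

	// Injected by the downward-API env entries added in the hunks above.
	fmt.Printf("pod=%s uid=%s ns=%s\n",
		os.Getenv("KFP_POD_NAME"), os.Getenv("KFP_POD_UID"), os.Getenv("NAMESPACE"))

	// Hypothetical call: present the audience-scoped token as a bearer
	// credential; a receiving server can validate it with a TokenReview.
	client := &http.Client{Timeout: 10 * time.Second}
	req, err := http.NewRequest(http.MethodGet,
		"http://ml-pipeline.kubeflow:8888/apis/v2beta1/healthz", nil) // assumed URL
	if err != nil {
		panic(err)
	}
	req.Header.Set("Authorization", "Bearer "+string(token))
	resp, err := client.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}

The design choice worth noting is the audience field: because the token is bound to pipelines.kubeflow.org rather than the default Kubernetes API audience, it is useless against the kube-apiserver, which limits the blast radius if a workload pod is compromised.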